diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index fcf707fef3d..ce89d8c2b10 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -32,7 +32,7 @@ jobs: fetch-depth: 0 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -116,7 +116,7 @@ jobs: - name: Set up Python ${{ env.DEFAULT_PYTHON }} if: needs.init.outputs.channel == 'dev' - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -457,7 +457,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2b1606568b5..a843133f1a5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -40,7 +40,7 @@ env: CACHE_VERSION: 12 UV_CACHE_VERSION: 1 MYPY_CACHE_VERSION: 9 - HA_SHORT_VERSION: "2025.4" + HA_SHORT_VERSION: "2025.5" DEFAULT_PYTHON: "3.13" ALL_PYTHON_VERSIONS: "['3.13']" # 10.3 is the oldest supported version @@ -249,7 +249,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -294,7 +294,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -334,7 +334,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -374,7 +374,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -484,7 +484,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -587,7 +587,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -620,7 +620,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -677,7 +677,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -720,7 +720,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: 
${{ env.DEFAULT_PYTHON }} check-latest: true @@ -767,7 +767,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -812,7 +812,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -889,7 +889,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -949,7 +949,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -1074,7 +1074,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -1208,7 +1208,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -1359,7 +1359,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index f4d4144243c..bd072752d16 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.28.12 + uses: github/codeql-action/init@v3.28.13 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.28.12 + uses: github/codeql-action/analyze@v3.28.13 with: category: "/language:python" diff --git a/.github/workflows/translations.yml b/.github/workflows/translations.yml index 619d83aef51..0b6abe8fe2c 100644 --- a/.github/workflows/translations.yml +++ b/.github/workflows/translations.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index cdf0c07cccf..d27a62bab80 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -36,7 +36,7 @@ jobs: - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -159,7 +159,7 @@ jobs: sed -i "/uv/d" requirements_diff.txt - name: Build wheels - uses: home-assistant/wheels@2025.02.0 + uses: home-assistant/wheels@2025.03.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 @@ -219,7 +219,7 @@ jobs: sed -i "/uv/d" requirements_diff.txt - name: Build wheels - uses: home-assistant/wheels@2025.02.0 + 
uses: home-assistant/wheels@2025.03.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 diff --git a/.strict-typing b/.strict-typing index 0e00c2e9e07..e0c4e569f4b 100644 --- a/.strict-typing +++ b/.strict-typing @@ -119,6 +119,7 @@ homeassistant.components.bluetooth_adapters.* homeassistant.components.bluetooth_tracker.* homeassistant.components.bmw_connected_drive.* homeassistant.components.bond.* +homeassistant.components.bosch_alarm.* homeassistant.components.braviatv.* homeassistant.components.bring.* homeassistant.components.brother.* diff --git a/CODEOWNERS b/CODEOWNERS index 1835e6d0be4..8afd3bab028 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -216,6 +216,8 @@ build.json @home-assistant/supervisor /tests/components/bmw_connected_drive/ @gerard33 @rikroe /homeassistant/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto /tests/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto +/homeassistant/components/bosch_alarm/ @mag1024 @sanjay900 +/tests/components/bosch_alarm/ @mag1024 @sanjay900 /homeassistant/components/bosch_shc/ @tschamm /tests/components/bosch_shc/ @tschamm /homeassistant/components/braviatv/ @bieniu @Drafteed @@ -1183,6 +1185,8 @@ build.json @home-assistant/supervisor /tests/components/prusalink/ @balloob /homeassistant/components/ps4/ @ktnrg45 /tests/components/ps4/ @ktnrg45 +/homeassistant/components/pterodactyl/ @elmurato +/tests/components/pterodactyl/ @elmurato /homeassistant/components/pure_energie/ @klaasnicolaas /tests/components/pure_energie/ @klaasnicolaas /homeassistant/components/purpleair/ @bachya @@ -1476,8 +1480,6 @@ build.json @home-assistant/supervisor /tests/components/suez_water/ @ooii @jb101010-2 /homeassistant/components/sun/ @Swamp-Ig /tests/components/sun/ @Swamp-Ig -/homeassistant/components/sunweg/ @rokam -/tests/components/sunweg/ @rokam /homeassistant/components/supla/ @mwegrzynek /homeassistant/components/surepetcare/ @benleb @danielhiversen /tests/components/surepetcare/ @benleb @danielhiversen diff --git a/Dockerfile b/Dockerfile index 2efb9d59a44..0a74e0a3aac 100644 --- a/Dockerfile +++ b/Dockerfile @@ -31,7 +31,7 @@ RUN \ && go2rtc --version # Install uv -RUN pip3 install uv==0.6.8 +RUN pip3 install uv==0.6.10 WORKDIR /usr/src diff --git a/build.yaml b/build.yaml index cd54e410493..87dad1bf5ef 100644 --- a/build.yaml +++ b/build.yaml @@ -19,4 +19,4 @@ labels: org.opencontainers.image.authors: The Home Assistant Authors org.opencontainers.image.url: https://www.home-assistant.io/ org.opencontainers.image.documentation: https://www.home-assistant.io/docs/ - org.opencontainers.image.licenses: Apache License 2.0 + org.opencontainers.image.licenses: Apache-2.0 diff --git a/homeassistant/brands/bosch.json b/homeassistant/brands/bosch.json new file mode 100644 index 00000000000..090cc2af7c3 --- /dev/null +++ b/homeassistant/brands/bosch.json @@ -0,0 +1,5 @@ +{ + "domain": "bosch", + "name": "Bosch", + "integrations": ["bosch_alarm", "bosch_shc", "home_connect"] +} diff --git a/homeassistant/brands/motionblinds.json b/homeassistant/brands/motionblinds.json index 67013e75966..5a48b573b4d 100644 --- a/homeassistant/brands/motionblinds.json +++ b/homeassistant/brands/motionblinds.json @@ -1,5 +1,6 @@ { "domain": "motionblinds", "name": "Motionblinds", - "integrations": ["motion_blinds", "motionblinds_ble"] + "integrations": ["motion_blinds", "motionblinds_ble"], + "iot_standards": ["matter"] } diff --git a/homeassistant/components/airgradient/strings.json b/homeassistant/components/airgradient/strings.json index 
2d9b6be529d..cef4db57358 100644 --- a/homeassistant/components/airgradient/strings.json +++ b/homeassistant/components/airgradient/strings.json @@ -68,8 +68,8 @@ "led_bar_mode": { "name": "LED bar mode", "state": { - "off": "Off", - "co2": "Carbon dioxide", + "off": "[%key:common::state::off%]", + "co2": "[%key:component::sensor::entity_component::carbon_dioxide::name%]", "pm": "Particulate matter" } }, @@ -143,8 +143,8 @@ "led_bar_mode": { "name": "[%key:component::airgradient::entity::select::led_bar_mode::name%]", "state": { - "off": "[%key:component::airgradient::entity::select::led_bar_mode::state::off%]", - "co2": "[%key:component::airgradient::entity::select::led_bar_mode::state::co2%]", + "off": "[%key:common::state::off%]", + "co2": "[%key:component::sensor::entity_component::carbon_dioxide::name%]", "pm": "[%key:component::airgradient::entity::select::led_bar_mode::state::pm%]" } }, diff --git a/homeassistant/components/airnow/coordinator.py b/homeassistant/components/airnow/coordinator.py index ee5bf4a1dd7..1e73bc7551e 100644 --- a/homeassistant/components/airnow/coordinator.py +++ b/homeassistant/components/airnow/coordinator.py @@ -8,7 +8,7 @@ from aiohttp import ClientSession from aiohttp.client_exceptions import ClientConnectorError from pyairnow import WebServiceAPI from pyairnow.conv import aqi_to_concentration -from pyairnow.errors import AirNowError +from pyairnow.errors import AirNowError, InvalidJsonError from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -79,7 +79,7 @@ class AirNowDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): distance=self.distance, ) - except (AirNowError, ClientConnectorError) as error: + except (AirNowError, ClientConnectorError, InvalidJsonError) as error: raise UpdateFailed(error) from error if not obs: diff --git a/homeassistant/components/airthings_ble/config_flow.py b/homeassistant/components/airthings_ble/config_flow.py index 3e7b659bff1..2d32fa6e7df 100644 --- a/homeassistant/components/airthings_ble/config_flow.py +++ b/homeassistant/components/airthings_ble/config_flow.py @@ -102,7 +102,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): device = await self._get_device_data(discovery_info) except AirthingsDeviceUpdateError: return self.async_abort(reason="cannot_connect") - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unknown error occurred") return self.async_abort(reason="unknown") name = get_name(device) @@ -160,7 +161,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): device = await self._get_device_data(discovery_info) except AirthingsDeviceUpdateError: return self.async_abort(reason="cannot_connect") - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unknown error occurred") return self.async_abort(reason="unknown") name = get_name(device) self._discovered_devices[address] = Discovery(name, discovery_info, device) diff --git a/homeassistant/components/airtouch5/config_flow.py b/homeassistant/components/airtouch5/config_flow.py index d96aaed96b7..38c85e45fb8 100644 --- a/homeassistant/components/airtouch5/config_flow.py +++ b/homeassistant/components/airtouch5/config_flow.py @@ -32,7 +32,8 @@ class AirTouch5ConfigFlow(ConfigFlow, domain=DOMAIN): client = Airtouch5SimpleClient(user_input[CONF_HOST]) try: await client.test_connection() - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors = {"base": "cannot_connect"} else: await 
self.async_set_unique_id(user_input[CONF_HOST]) diff --git a/homeassistant/components/airvisual/strings.json b/homeassistant/components/airvisual/strings.json index 148b1368a19..7a5f8b1d5c7 100644 --- a/homeassistant/components/airvisual/strings.json +++ b/homeassistant/components/airvisual/strings.json @@ -2,7 +2,7 @@ "config": { "step": { "geography_by_coords": { - "title": "Configure a Geography", + "title": "Configure a geography", "description": "Use the AirVisual cloud API to monitor a latitude/longitude.", "data": { "api_key": "[%key:common::config_flow::data::api_key%]", @@ -56,12 +56,12 @@ "sensor": { "pollutant_label": { "state": { - "co": "Carbon Monoxide", - "n2": "Nitrogen Dioxide", + "co": "Carbon monoxide", + "n2": "Nitrogen dioxide", "o3": "Ozone", "p1": "PM10", "p2": "PM2.5", - "s2": "Sulfur Dioxide" + "s2": "Sulfur dioxide" } }, "pollutant_level": { diff --git a/homeassistant/components/airzone_cloud/manifest.json b/homeassistant/components/airzone_cloud/manifest.json index 0e21e57ec52..3b6f94df57c 100644 --- a/homeassistant/components/airzone_cloud/manifest.json +++ b/homeassistant/components/airzone_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone_cloud", "iot_class": "cloud_push", "loggers": ["aioairzone_cloud"], - "requirements": ["aioairzone-cloud==0.6.10"] + "requirements": ["aioairzone-cloud==0.6.11"] } diff --git a/homeassistant/components/airzone_cloud/strings.json b/homeassistant/components/airzone_cloud/strings.json index 6e0f9adcd66..5dbd4384386 100644 --- a/homeassistant/components/airzone_cloud/strings.json +++ b/homeassistant/components/airzone_cloud/strings.json @@ -32,8 +32,8 @@ "air_quality": { "name": "Air Quality mode", "state": { - "off": "Off", - "on": "On", + "off": "[%key:common::state::off%]", + "on": "[%key:common::state::on%]", "auto": "Auto" } }, diff --git a/homeassistant/components/alexa/capabilities.py b/homeassistant/components/alexa/capabilities.py index e70055c20b1..897037987a7 100644 --- a/homeassistant/components/alexa/capabilities.py +++ b/homeassistant/components/alexa/capabilities.py @@ -1438,7 +1438,7 @@ class AlexaModeController(AlexaCapability): # Fan preset_mode if self.instance == f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}": mode = self.entity.attributes.get(fan.ATTR_PRESET_MODE, None) - if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, None): + if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, ()): return f"{fan.ATTR_PRESET_MODE}.{mode}" # Humidifier mode diff --git a/homeassistant/components/amazon_polly/manifest.json b/homeassistant/components/amazon_polly/manifest.json index e7fbf8edc74..f684292d9a2 100644 --- a/homeassistant/components/amazon_polly/manifest.json +++ b/homeassistant/components/amazon_polly/manifest.json @@ -6,5 +6,5 @@ "iot_class": "cloud_push", "loggers": ["boto3", "botocore", "s3transfer"], "quality_scale": "legacy", - "requirements": ["boto3==1.34.131"] + "requirements": ["boto3==1.37.1"] } diff --git a/homeassistant/components/ambient_network/sensor.py b/homeassistant/components/ambient_network/sensor.py index 9ec6db6ff45..b96da9863a1 100644 --- a/homeassistant/components/ambient_network/sensor.py +++ b/homeassistant/components/ambient_network/sensor.py @@ -240,6 +240,7 @@ SENSOR_DESCRIPTIONS = ( suggested_display_precision=0, entity_registry_enabled_default=False, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), SensorEntityDescription( key=TYPE_WINDGUSTMPH, diff --git 
a/homeassistant/components/ambient_station/sensor.py b/homeassistant/components/ambient_station/sensor.py index 730b798bd15..1b4334774d4 100644 --- a/homeassistant/components/ambient_station/sensor.py +++ b/homeassistant/components/ambient_station/sensor.py @@ -609,6 +609,7 @@ SENSOR_DESCRIPTIONS = ( translation_key="wind_direction", native_unit_of_measurement=DEGREE, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), SensorEntityDescription( key=TYPE_WINDDIR_AVG10M, diff --git a/homeassistant/components/android_ip_webcam/manifest.json b/homeassistant/components/android_ip_webcam/manifest.json index 57af567ec51..d7a9f8ad97a 100644 --- a/homeassistant/components/android_ip_webcam/manifest.json +++ b/homeassistant/components/android_ip_webcam/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/android_ip_webcam", "iot_class": "local_polling", - "requirements": ["pydroid-ipcam==2.0.0"] + "requirements": ["pydroid-ipcam==3.0.0"] } diff --git a/homeassistant/components/androidtv_remote/manifest.json b/homeassistant/components/androidtv_remote/manifest.json index 1c45e825359..89cc0fc3965 100644 --- a/homeassistant/components/androidtv_remote/manifest.json +++ b/homeassistant/components/androidtv_remote/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["androidtvremote2"], - "requirements": ["androidtvremote2==0.2.0"], + "requirements": ["androidtvremote2==0.2.1"], "zeroconf": ["_androidtvremote2._tcp.local."] } diff --git a/homeassistant/components/anova/config_flow.py b/homeassistant/components/anova/config_flow.py index bc4723b1dba..f382606baba 100644 --- a/homeassistant/components/anova/config_flow.py +++ b/homeassistant/components/anova/config_flow.py @@ -2,6 +2,8 @@ from __future__ import annotations +import logging + from anova_wifi import AnovaApi, InvalidLogin import voluptuous as vol @@ -11,8 +13,10 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) -class AnovaConfligFlow(ConfigFlow, domain=DOMAIN): + +class AnovaConfigFlow(ConfigFlow, domain=DOMAIN): """Sets up a config flow for Anova.""" VERSION = 1 @@ -35,7 +39,8 @@ class AnovaConfligFlow(ConfigFlow, domain=DOMAIN): await api.authenticate() except InvalidLogin: errors["base"] = "invalid_auth" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return self.async_create_entry( diff --git a/homeassistant/components/apsystems/coordinator.py b/homeassistant/components/apsystems/coordinator.py index ca423055176..f7f1039b8a4 100644 --- a/homeassistant/components/apsystems/coordinator.py +++ b/homeassistant/components/apsystems/coordinator.py @@ -43,6 +43,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]): config_entry: ApSystemsConfigEntry device_version: str + battery_system: bool def __init__( self, @@ -68,6 +69,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]): self.api.max_power = device_info.maxPower self.api.min_power = device_info.minPower self.device_version = device_info.devVer + self.battery_system = device_info.isBatterySystem async def _async_update_data(self) -> ApSystemsSensorData: try: diff --git a/homeassistant/components/apsystems/manifest.json b/homeassistant/components/apsystems/manifest.json index a58530b05e2..934a155c500 100644 --- 
a/homeassistant/components/apsystems/manifest.json +++ b/homeassistant/components/apsystems/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/apsystems", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["apsystems-ez1==2.4.0"] + "requirements": ["apsystems-ez1==2.5.0"] } diff --git a/homeassistant/components/apsystems/switch.py b/homeassistant/components/apsystems/switch.py index e1017f95448..5451f2885fe 100644 --- a/homeassistant/components/apsystems/switch.py +++ b/homeassistant/components/apsystems/switch.py @@ -36,6 +36,8 @@ class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity): super().__init__(data) self._api = data.coordinator.api self._attr_unique_id = f"{data.device_id}_inverter_status" + if data.coordinator.battery_system: + self._attr_available = False async def async_update(self) -> None: """Update switch status and availability.""" diff --git a/homeassistant/components/aquacell/config_flow.py b/homeassistant/components/aquacell/config_flow.py index 1ee89035d93..277cb742486 100644 --- a/homeassistant/components/aquacell/config_flow.py +++ b/homeassistant/components/aquacell/config_flow.py @@ -60,7 +60,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except AuthenticationFailed: errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/arwn/sensor.py b/homeassistant/components/arwn/sensor.py index a31156bbba6..4cc4feed2d4 100644 --- a/homeassistant/components/arwn/sensor.py +++ b/homeassistant/components/arwn/sensor.py @@ -6,7 +6,11 @@ import logging from typing import Any from homeassistant.components import mqtt -from homeassistant.components.sensor import SensorDeviceClass, SensorEntity +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorStateClass, +) from homeassistant.const import DEGREE, UnitOfPrecipitationDepth, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -98,6 +102,7 @@ def discover_sensors(topic: str, payload: dict[str, Any]) -> list[ArwnSensor] | DEGREE, "mdi:compass", device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), ] return None @@ -178,6 +183,7 @@ class ArwnSensor(SensorEntity): units: str, icon: str | None = None, device_class: SensorDeviceClass | None = None, + state_class: SensorStateClass | None = None, ) -> None: """Initialize the sensor.""" self.entity_id = _slug(name) @@ -188,6 +194,7 @@ class ArwnSensor(SensorEntity): self._attr_native_unit_of_measurement = units self._attr_icon = icon self._attr_device_class = device_class + self._attr_state_class = state_class def set_event(self, event: dict[str, Any]) -> None: """Update the sensor with the most recent event.""" diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index 42bb2d4ced8..a205db4e615 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -125,7 +125,7 @@ SAVE_DELAY = 10 @callback def _async_local_fallback_intent_filter(result: RecognizeResult) -> bool: """Filter out intents that are not local fallback.""" - return result.intent.name in (intent.INTENT_GET_STATE, intent.INTENT_NEVERMIND) + 
return result.intent.name in (intent.INTENT_GET_STATE) @callback diff --git a/homeassistant/components/assist_satellite/__init__.py b/homeassistant/components/assist_satellite/__init__.py index 038ff517264..bc2157b10b2 100644 --- a/homeassistant/components/assist_satellite/__init__.py +++ b/homeassistant/components/assist_satellite/__init__.py @@ -1,9 +1,11 @@ """Base class for assist satellite entities.""" import logging +from pathlib import Path import voluptuous as vol +from homeassistant.components.http import StaticPathConfig from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv @@ -15,6 +17,8 @@ from .const import ( CONNECTION_TEST_DATA, DATA_COMPONENT, DOMAIN, + PREANNOUNCE_FILENAME, + PREANNOUNCE_URL, AssistSatelliteEntityFeature, ) from .entity import ( @@ -56,6 +60,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: { vol.Optional("message"): str, vol.Optional("media_id"): str, + vol.Optional("preannounce_media_id"): vol.Any(str, None), } ), cv.has_at_least_one_key("message", "media_id"), @@ -70,6 +75,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: { vol.Optional("start_message"): str, vol.Optional("start_media_id"): str, + vol.Optional("preannounce_media_id"): vol.Any(str, None), vol.Optional("extra_system_prompt"): str, } ), @@ -82,6 +88,15 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async_register_websocket_api(hass) hass.http.register_view(ConnectionTestView()) + # Default preannounce sound + await hass.http.async_register_static_paths( + [ + StaticPathConfig( + PREANNOUNCE_URL, str(Path(__file__).parent / PREANNOUNCE_FILENAME) + ) + ] + ) + return True diff --git a/homeassistant/components/assist_satellite/const.py b/homeassistant/components/assist_satellite/const.py index f7ac7e524b4..7fca88f3b12 100644 --- a/homeassistant/components/assist_satellite/const.py +++ b/homeassistant/components/assist_satellite/const.py @@ -20,6 +20,9 @@ CONNECTION_TEST_DATA: HassKey[dict[str, asyncio.Event]] = HassKey( f"{DOMAIN}_connection_tests" ) +PREANNOUNCE_FILENAME = "preannounce.mp3" +PREANNOUNCE_URL = f"/api/assist_satellite/static/{PREANNOUNCE_FILENAME}" + class AssistSatelliteEntityFeature(IntFlag): """Supported features of Assist satellite entity.""" diff --git a/homeassistant/components/assist_satellite/entity.py b/homeassistant/components/assist_satellite/entity.py index 3db38a23889..7b4c1b92d8c 100644 --- a/homeassistant/components/assist_satellite/entity.py +++ b/homeassistant/components/assist_satellite/entity.py @@ -28,7 +28,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import chat_session, entity from homeassistant.helpers.entity import EntityDescription -from .const import AssistSatelliteEntityFeature +from .const import PREANNOUNCE_URL, AssistSatelliteEntityFeature from .errors import AssistSatelliteError, SatelliteBusyError _LOGGER = logging.getLogger(__name__) @@ -101,6 +101,9 @@ class AssistSatelliteAnnouncement: media_id_source: Literal["url", "media_id", "tts"] """Source of the media ID.""" + preannounce_media_id: str | None = None + """Media ID to be played before announcement.""" + class AssistSatelliteEntity(entity.Entity): """Entity encapsulating the state and functionality of an Assist satellite.""" @@ -177,6 +180,7 @@ class AssistSatelliteEntity(entity.Entity): self, message: str | None = None, media_id: str | None = None, + 
preannounce_media_id: str | None = PREANNOUNCE_URL, ) -> None: """Play and show an announcement on the satellite. @@ -186,6 +190,9 @@ class AssistSatelliteEntity(entity.Entity): If media_id is provided, it is played directly. It is possible to omit the message and the satellite will not show any text. + If preannounce_media_id is provided, it overrides the default sound. + If preannounce_media_id is None, no sound is played. + Calls async_announce with message and media id. """ await self._cancel_running_pipeline() @@ -193,7 +200,9 @@ class AssistSatelliteEntity(entity.Entity): if message is None: message = "" - announcement = await self._resolve_announcement_media_id(message, media_id) + announcement = await self._resolve_announcement_media_id( + message, media_id, preannounce_media_id + ) if self._is_announcing: raise SatelliteBusyError @@ -220,6 +229,7 @@ class AssistSatelliteEntity(entity.Entity): start_message: str | None = None, start_media_id: str | None = None, extra_system_prompt: str | None = None, + preannounce_media_id: str | None = PREANNOUNCE_URL, ) -> None: """Start a conversation from the satellite. @@ -229,6 +239,9 @@ class AssistSatelliteEntity(entity.Entity): If start_media_id is provided, it is played directly. It is possible to omit the message and the satellite will not show any text. + If preannounce_media_id is provided, it is played before the announcement. + If preannounce_media_id is None, no sound is played. + Calls async_start_conversation. """ await self._cancel_running_pipeline() @@ -244,13 +257,15 @@ class AssistSatelliteEntity(entity.Entity): start_message = "" announcement = await self._resolve_announcement_media_id( - start_message, start_media_id + start_message, start_media_id, preannounce_media_id ) if self._is_announcing: raise SatelliteBusyError self._is_announcing = True + self._set_state(AssistSatelliteState.RESPONDING) + # Provide our start info to the LLM so it understands context of incoming message if extra_system_prompt is not None: self._extra_system_prompt = extra_system_prompt @@ -280,6 +295,7 @@ class AssistSatelliteEntity(entity.Entity): raise finally: self._is_announcing = False + self._set_state(AssistSatelliteState.IDLE) async def async_start_conversation( self, start_announcement: AssistSatelliteAnnouncement @@ -470,7 +486,10 @@ class AssistSatelliteEntity(entity.Entity): return vad.VadSensitivity.to_seconds(vad_sensitivity) async def _resolve_announcement_media_id( - self, message: str, media_id: str | None + self, + message: str, + media_id: str | None, + preannounce_media_id: str | None = None, ) -> AssistSatelliteAnnouncement: """Resolve the media ID.""" media_id_source: Literal["url", "media_id", "tts"] | None = None @@ -478,7 +497,6 @@ class AssistSatelliteEntity(entity.Entity): if media_id: original_media_id = media_id - else: media_id_source = "tts" # Synthesize audio and get URL @@ -530,10 +548,26 @@ class AssistSatelliteEntity(entity.Entity): # Resolve to full URL media_id = async_process_play_media_url(self.hass, media_id) + # Resolve preannounce media id + if preannounce_media_id: + if media_source.is_media_source_id(preannounce_media_id): + preannounce_media = await media_source.async_resolve_media( + self.hass, + preannounce_media_id, + None, + ) + preannounce_media_id = preannounce_media.url + + # Resolve to full URL + preannounce_media_id = async_process_play_media_url( + self.hass, preannounce_media_id + ) + return AssistSatelliteAnnouncement( message=message, media_id=media_id, original_media_id=original_media_id, 
tts_token=tts_token, media_id_source=media_id_source, + preannounce_media_id=preannounce_media_id, ) diff --git a/homeassistant/components/assist_satellite/preannounce.mp3 b/homeassistant/components/assist_satellite/preannounce.mp3 new file mode 100644 index 00000000000..6e2fa0aba3e Binary files /dev/null and b/homeassistant/components/assist_satellite/preannounce.mp3 differ diff --git a/homeassistant/components/assist_satellite/services.yaml b/homeassistant/components/assist_satellite/services.yaml index 89a20ada6f3..7d334d6a8db 100644 --- a/homeassistant/components/assist_satellite/services.yaml +++ b/homeassistant/components/assist_satellite/services.yaml @@ -8,12 +8,17 @@ announce: message: required: false example: "Time to wake up!" + default: "" selector: text: media_id: required: false selector: text: + preannounce_media_id: + required: false + selector: + text: start_conversation: target: entity: @@ -24,6 +29,7 @@ start_conversation: start_message: required: false example: "You left the lights on in the living room. Turn them off?" + default: "" selector: text: start_media_id: @@ -34,3 +40,7 @@ start_conversation: required: false selector: text: + preannounce_media_id: + required: false + selector: + text: diff --git a/homeassistant/components/assist_satellite/strings.json b/homeassistant/components/assist_satellite/strings.json index fa2dc984ab7..2bb61516bca 100644 --- a/homeassistant/components/assist_satellite/strings.json +++ b/homeassistant/components/assist_satellite/strings.json @@ -23,6 +23,10 @@ "media_id": { "name": "Media ID", "description": "The media ID to announce instead of using text-to-speech." + }, + "preannounce_media_id": { + "name": "Preannounce Media ID", + "description": "The media ID to play before the announcement." } } }, @@ -41,6 +45,10 @@ "extra_system_prompt": { "name": "Extra system prompt", "description": "Provide background information to the AI about the request." + }, + "preannounce_media_id": { + "name": "Preannounce Media ID", + "description": "The media ID to play before the start message or media." 
} } } diff --git a/homeassistant/components/assist_satellite/websocket_api.py b/homeassistant/components/assist_satellite/websocket_api.py index 4fc1708b866..0a95880706a 100644 --- a/homeassistant/components/assist_satellite/websocket_api.py +++ b/homeassistant/components/assist_satellite/websocket_api.py @@ -198,7 +198,8 @@ async def websocket_test_connection( hass.async_create_background_task( satellite.async_internal_announce( - media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}" + media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}", + preannounce_media_id=None, ), f"assist_satellite_connection_test_{msg['entity_id']}", ) diff --git a/homeassistant/components/aws/manifest.json b/homeassistant/components/aws/manifest.json index 12149e4388a..92ae37c857b 100644 --- a/homeassistant/components/aws/manifest.json +++ b/homeassistant/components/aws/manifest.json @@ -6,5 +6,5 @@ "iot_class": "cloud_push", "loggers": ["aiobotocore", "botocore"], "quality_scale": "legacy", - "requirements": ["aiobotocore==2.13.1", "botocore==1.34.131"] + "requirements": ["aiobotocore==2.21.1", "botocore==1.37.1"] } diff --git a/homeassistant/components/backup/__init__.py b/homeassistant/components/backup/__init__.py index d9d1c3cc2fe..124ce8b872c 100644 --- a/homeassistant/components/backup/__init__.py +++ b/homeassistant/components/backup/__init__.py @@ -1,7 +1,9 @@ """The Backup integration.""" +from homeassistant.config_entries import SOURCE_SYSTEM +from homeassistant.const import Platform from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import config_validation as cv +from homeassistant.helpers import config_validation as cv, discovery_flow from homeassistant.helpers.backup import DATA_BACKUP from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.typing import ConfigType @@ -18,10 +20,12 @@ from .agent import ( ) from .config import BackupConfig, CreateBackupParametersDict from .const import DATA_MANAGER, DOMAIN +from .coordinator import BackupConfigEntry, BackupDataUpdateCoordinator from .http import async_register_http_views from .manager import ( BackupManager, BackupManagerError, + BackupPlatformEvent, BackupPlatformProtocol, BackupReaderWriter, BackupReaderWriterError, @@ -52,6 +56,7 @@ __all__ = [ "BackupConfig", "BackupManagerError", "BackupNotFound", + "BackupPlatformEvent", "BackupPlatformProtocol", "BackupReaderWriter", "BackupReaderWriterError", @@ -74,6 +79,8 @@ __all__ = [ "suggested_filename_from_name_date", ] +PLATFORMS = [Platform.SENSOR] + CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) @@ -128,4 +135,28 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async_register_http_views(hass) + discovery_flow.async_create_flow( + hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={} + ) + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool: + """Set up a config entry.""" + backup_manager: BackupManager = hass.data[DATA_MANAGER] + coordinator = BackupDataUpdateCoordinator(hass, entry, backup_manager) + await coordinator.async_config_entry_first_refresh() + + entry.async_on_unload(coordinator.async_unsubscribe) + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git 
a/homeassistant/components/backup/config_flow.py b/homeassistant/components/backup/config_flow.py new file mode 100644 index 00000000000..ab1f884ea86 --- /dev/null +++ b/homeassistant/components/backup/config_flow.py @@ -0,0 +1,21 @@ +"""Config flow for Home Assistant Backup integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult + +from .const import DOMAIN + + +class BackupConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Home Assistant Backup.""" + + VERSION = 1 + + async def async_step_system( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + return self.async_create_entry(title="Backup", data={}) diff --git a/homeassistant/components/backup/const.py b/homeassistant/components/backup/const.py index c2070a37b2d..773deaef174 100644 --- a/homeassistant/components/backup/const.py +++ b/homeassistant/components/backup/const.py @@ -16,8 +16,8 @@ DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN) LOGGER = getLogger(__package__) EXCLUDE_FROM_BACKUP = [ - "__pycache__/*", - ".DS_Store", + "**/__pycache__/*", + "**/.DS_Store", ".HA_RESTORE", "*.db-shm", "*.log.*", diff --git a/homeassistant/components/backup/coordinator.py b/homeassistant/components/backup/coordinator.py new file mode 100644 index 00000000000..377f23567e0 --- /dev/null +++ b/homeassistant/components/backup/coordinator.py @@ -0,0 +1,81 @@ +"""Coordinator for Home Assistant Backup integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.backup import ( + async_subscribe_events, + async_subscribe_platform_events, +) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import DOMAIN, LOGGER +from .manager import ( + BackupManager, + BackupManagerState, + BackupPlatformEvent, + ManagerStateEvent, +) + +type BackupConfigEntry = ConfigEntry[BackupDataUpdateCoordinator] + + +@dataclass +class BackupCoordinatorData: + """Class to hold backup data.""" + + backup_manager_state: BackupManagerState + last_successful_automatic_backup: datetime | None + next_scheduled_automatic_backup: datetime | None + + +class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]): + """Class to retrieve backup status.""" + + config_entry: ConfigEntry + + def __init__( + self, + hass: HomeAssistant, + config_entry: ConfigEntry, + backup_manager: BackupManager, + ) -> None: + """Initialize coordinator.""" + super().__init__( + hass, + LOGGER, + config_entry=config_entry, + name=DOMAIN, + update_interval=None, + ) + self.unsubscribe: list[Callable[[], None]] = [ + async_subscribe_events(hass, self._on_event), + async_subscribe_platform_events(hass, self._on_event), + ] + + self.backup_manager = backup_manager + + @callback + def _on_event(self, event: ManagerStateEvent | BackupPlatformEvent) -> None: + """Handle new event.""" + LOGGER.debug("Received backup event: %s", event) + self.config_entry.async_create_task(self.hass, self.async_refresh()) + + async def _async_update_data(self) -> BackupCoordinatorData: + """Update backup manager data.""" + return BackupCoordinatorData( + self.backup_manager.state, + self.backup_manager.config.data.last_completed_automatic_backup, + 
self.backup_manager.config.data.schedule.next_automatic_backup, + ) + + @callback + def async_unsubscribe(self) -> None: + """Unsubscribe from events.""" + for unsub in self.unsubscribe: + unsub() diff --git a/homeassistant/components/backup/diagnostics.py b/homeassistant/components/backup/diagnostics.py new file mode 100644 index 00000000000..9c3e28bde5b --- /dev/null +++ b/homeassistant/components/backup/diagnostics.py @@ -0,0 +1,27 @@ +"""Diagnostics support for Home Assistant Backup integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.const import CONF_PASSWORD +from homeassistant.core import HomeAssistant + +from .coordinator import BackupConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: BackupConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordinator = entry.runtime_data + return { + "backup_agents": [ + {"name": agent.name, "agent_id": agent.agent_id} + for agent in coordinator.backup_manager.backup_agents.values() + ], + "backup_config": async_redact_data( + coordinator.backup_manager.config.data.to_dict(), [CONF_PASSWORD] + ), + } diff --git a/homeassistant/components/backup/entity.py b/homeassistant/components/backup/entity.py new file mode 100644 index 00000000000..ff7c7889dc5 --- /dev/null +++ b/homeassistant/components/backup/entity.py @@ -0,0 +1,36 @@ +"""Base for backup entities.""" + +from __future__ import annotations + +from homeassistant.const import __version__ as HA_VERSION +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import BackupDataUpdateCoordinator + + +class BackupManagerEntity(CoordinatorEntity[BackupDataUpdateCoordinator]): + """Base entity for backup manager.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: BackupDataUpdateCoordinator, + entity_description: EntityDescription, + ) -> None: + """Initialize base entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = entity_description.key + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, "backup_manager")}, + manufacturer="Home Assistant", + model="Home Assistant Backup", + sw_version=HA_VERSION, + name="Backup", + entry_type=DeviceEntryType.SERVICE, + configuration_url="homeassistant://config/backup", + ) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 6dbe863185c..43a7be6db8d 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -229,6 +229,13 @@ class RestoreBackupEvent(ManagerStateEvent): state: RestoreBackupState +@dataclass(frozen=True, kw_only=True, slots=True) +class BackupPlatformEvent: + """Backup platform class.""" + + domain: str + + @dataclass(frozen=True, kw_only=True, slots=True) class BlockedEvent(ManagerStateEvent): """Backup manager blocked, Home Assistant is starting.""" @@ -355,6 +362,9 @@ class BackupManager: self._backup_event_subscriptions = hass.data[ DATA_BACKUP ].backup_event_subscriptions + self._backup_platform_event_subscriptions = hass.data[ + DATA_BACKUP + ].backup_platform_event_subscriptions async def async_setup(self) -> None: """Set up the backup manager.""" @@ -465,6 
+475,9 @@ class BackupManager: LOGGER.debug("%s platforms loaded in total", len(self.platforms)) LOGGER.debug("%s agents loaded in total", len(self.backup_agents)) LOGGER.debug("%s local agents loaded in total", len(self.local_backup_agents)) + event = BackupPlatformEvent(domain=integration_domain) + for subscription in self._backup_platform_event_subscriptions: + subscription(event) async def async_pre_backup_actions(self) -> None: """Perform pre backup actions.""" @@ -1713,7 +1726,9 @@ class CoreBackupReaderWriter(BackupReaderWriter): """Filter to filter excludes.""" for exclude in excludes: - if not path.match(exclude): + # The home assistant core configuration directory is added as "data" + # in the tar file, so we need to prefix that path to the filters. + if not path.full_match(f"data/{exclude}"): continue LOGGER.debug("Ignoring %s because of %s", path, exclude) return True diff --git a/homeassistant/components/backup/manifest.json b/homeassistant/components/backup/manifest.json index db0719983b1..3c7b1e5e014 100644 --- a/homeassistant/components/backup/manifest.json +++ b/homeassistant/components/backup/manifest.json @@ -5,8 +5,9 @@ "codeowners": ["@home-assistant/core"], "dependencies": ["http", "websocket_api"], "documentation": "https://www.home-assistant.io/integrations/backup", - "integration_type": "system", + "integration_type": "service", "iot_class": "calculated", "quality_scale": "internal", - "requirements": ["cronsim==2.6", "securetar==2025.2.1"] + "requirements": ["cronsim==2.6", "securetar==2025.2.1"], + "single_config_entry": true } diff --git a/homeassistant/components/backup/sensor.py b/homeassistant/components/backup/sensor.py new file mode 100644 index 00000000000..59e98ae7c2d --- /dev/null +++ b/homeassistant/components/backup/sensor.py @@ -0,0 +1,75 @@ +"""Sensor platform for Home Assistant Backup integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .coordinator import BackupConfigEntry, BackupCoordinatorData +from .entity import BackupManagerEntity +from .manager import BackupManagerState + + +@dataclass(kw_only=True, frozen=True) +class BackupSensorEntityDescription(SensorEntityDescription): + """Description for Home Assistant Backup sensor entities.""" + + value_fn: Callable[[BackupCoordinatorData], str | datetime | None] + + +BACKUP_MANAGER_DESCRIPTIONS = ( + BackupSensorEntityDescription( + key="backup_manager_state", + translation_key="backup_manager_state", + device_class=SensorDeviceClass.ENUM, + options=[state.value for state in BackupManagerState], + value_fn=lambda data: data.backup_manager_state, + ), + BackupSensorEntityDescription( + key="next_scheduled_automatic_backup", + translation_key="next_scheduled_automatic_backup", + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda data: data.next_scheduled_automatic_backup, + ), + BackupSensorEntityDescription( + key="last_successful_automatic_backup", + translation_key="last_successful_automatic_backup", + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda data: data.last_successful_automatic_backup, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: BackupConfigEntry, + async_add_entities: 
AddConfigEntryEntitiesCallback, +) -> None: + """Sensor set up for backup config entry.""" + + coordinator = config_entry.runtime_data + + async_add_entities( + BackupManagerSensor(coordinator, description) + for description in BACKUP_MANAGER_DESCRIPTIONS + ) + + +class BackupManagerSensor(BackupManagerEntity, SensorEntity): + """Sensor to track backup manager state.""" + + entity_description: BackupSensorEntityDescription + + @property + def native_value(self) -> str | datetime | None: + """Return native value of entity.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/backup/strings.json b/homeassistant/components/backup/strings.json index c3047d3a4ac..357bcdbb72f 100644 --- a/homeassistant/components/backup/strings.json +++ b/homeassistant/components/backup/strings.json @@ -22,5 +22,24 @@ "name": "Create automatic backup", "description": "Creates a new backup with automatic backup settings." } + }, + "entity": { + "sensor": { + "backup_manager_state": { + "name": "Backup Manager state", + "state": { + "idle": "[%key:common::state::idle%]", + "create_backup": "Creating a backup", + "receive_backup": "Receiving a backup", + "restore_backup": "Restoring a backup" + } + }, + "next_scheduled_automatic_backup": { + "name": "Next scheduled automatic backup" + }, + "last_successful_automatic_backup": { + "name": "Last successful automatic backup" + } + } } } diff --git a/homeassistant/components/balay/__init__.py b/homeassistant/components/balay/__init__.py new file mode 100644 index 00000000000..e7fa8bba86d --- /dev/null +++ b/homeassistant/components/balay/__init__.py @@ -0,0 +1 @@ +"""Balay virtual integration.""" diff --git a/homeassistant/components/balay/manifest.json b/homeassistant/components/balay/manifest.json new file mode 100644 index 00000000000..98e4f521c7a --- /dev/null +++ b/homeassistant/components/balay/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "balay", + "name": "Balay", + "integration_type": "virtual", + "supported_by": "home_connect" +} diff --git a/homeassistant/components/binary_sensor/strings.json b/homeassistant/components/binary_sensor/strings.json index b86a6374f28..9fac758e168 100644 --- a/homeassistant/components/binary_sensor/strings.json +++ b/homeassistant/components/binary_sensor/strings.json @@ -132,7 +132,7 @@ "name": "Charging", "state": { "off": "Not charging", - "on": "Charging" + "on": "[%key:common::state::charging%]" } }, "carbon_monoxide": { diff --git a/homeassistant/components/blue_current/strings.json b/homeassistant/components/blue_current/strings.json index 2e48d768a74..b90a4792f65 100644 --- a/homeassistant/components/blue_current/strings.json +++ b/homeassistant/components/blue_current/strings.json @@ -37,7 +37,7 @@ "vehicle_status": { "name": "Vehicle status", "state": { - "standby": "Standby", + "standby": "[%key:common::state::standby%]", "vehicle_detected": "Detected", "ready": "Ready", "no_power": "No power", diff --git a/homeassistant/components/bmw_connected_drive/strings.json b/homeassistant/components/bmw_connected_drive/strings.json index 4b16b719d8d..bd9814476f5 100644 --- a/homeassistant/components/bmw_connected_drive/strings.json +++ b/homeassistant/components/bmw_connected_drive/strings.json @@ -6,7 +6,7 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "region": "ConnectedDrive Region" + "region": "ConnectedDrive region" }, "data_description": { "username": "The email address of your 
MyBMW/MINI Connected account.", @@ -113,10 +113,10 @@ }, "select": { "ac_limit": { - "name": "AC Charging Limit" + "name": "AC charging limit" }, "charging_mode": { - "name": "Charging Mode", + "name": "Charging mode", "state": { "immediate_charging": "Immediate charging", "delayed_charging": "Delayed charging", @@ -181,7 +181,7 @@ "cooling": "Cooling", "heating": "Heating", "inactive": "Inactive", - "standby": "Standby", + "standby": "[%key:common::state::standby%]", "ventilation": "Ventilation" } }, diff --git a/homeassistant/components/bosch_alarm/__init__.py b/homeassistant/components/bosch_alarm/__init__.py new file mode 100644 index 00000000000..bc7fee46f60 --- /dev/null +++ b/homeassistant/components/bosch_alarm/__init__.py @@ -0,0 +1,62 @@ +"""The Bosch Alarm integration.""" + +from __future__ import annotations + +from ssl import SSLError + +from bosch_alarm_mode2 import Panel + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr + +from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN + +PLATFORMS: list[Platform] = [Platform.ALARM_CONTROL_PANEL] + +type BoschAlarmConfigEntry = ConfigEntry[Panel] + + +async def async_setup_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool: + """Set up Bosch Alarm from a config entry.""" + + panel = Panel( + host=entry.data[CONF_HOST], + port=entry.data[CONF_PORT], + automation_code=entry.data.get(CONF_PASSWORD), + installer_or_user_code=entry.data.get( + CONF_INSTALLER_CODE, entry.data.get(CONF_USER_CODE) + ), + ) + try: + await panel.connect() + except (PermissionError, ValueError) as err: + await panel.disconnect() + raise ConfigEntryNotReady from err + except (TimeoutError, OSError, ConnectionRefusedError, SSLError) as err: + await panel.disconnect() + raise ConfigEntryNotReady("Connection failed") from err + + entry.runtime_data = panel + + device_registry = dr.async_get(hass) + + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, entry.unique_id or entry.entry_id)}, + name=f"Bosch {panel.model}", + manufacturer="Bosch Security Systems", + model=panel.model, + sw_version=panel.firmware_version, + ) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + await entry.runtime_data.disconnect() + return unload_ok diff --git a/homeassistant/components/bosch_alarm/alarm_control_panel.py b/homeassistant/components/bosch_alarm/alarm_control_panel.py new file mode 100644 index 00000000000..a1d8a7b90f4 --- /dev/null +++ b/homeassistant/components/bosch_alarm/alarm_control_panel.py @@ -0,0 +1,109 @@ +"""Support for Bosch Alarm Panel.""" + +from __future__ import annotations + +from bosch_alarm_mode2 import Panel + +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntity, + AlarmControlPanelEntityFeature, + AlarmControlPanelState, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import BoschAlarmConfigEntry +from .const import DOMAIN + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: BoschAlarmConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up control panels for each area.""" + panel = config_entry.runtime_data + + async_add_entities( + AreaAlarmControlPanel( + panel, + area_id, + config_entry.unique_id or config_entry.entry_id, + ) + for area_id in panel.areas + ) + + +class AreaAlarmControlPanel(AlarmControlPanelEntity): + """An alarm control panel entity for a bosch alarm panel.""" + + _attr_has_entity_name = True + _attr_supported_features = ( + AlarmControlPanelEntityFeature.ARM_HOME + | AlarmControlPanelEntityFeature.ARM_AWAY + ) + _attr_code_arm_required = False + _attr_name = None + + def __init__(self, panel: Panel, area_id: int, unique_id: str) -> None: + """Initialise a Bosch Alarm control panel entity.""" + self.panel = panel + self._area = panel.areas[area_id] + self._area_id = area_id + self._attr_unique_id = f"{unique_id}_area_{area_id}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._attr_unique_id)}, + name=self._area.name, + manufacturer="Bosch Security Systems", + via_device=( + DOMAIN, + unique_id, + ), + ) + + @property + def alarm_state(self) -> AlarmControlPanelState | None: + """Return the state of the alarm.""" + if self._area.is_triggered(): + return AlarmControlPanelState.TRIGGERED + if self._area.is_disarmed(): + return AlarmControlPanelState.DISARMED + if self._area.is_arming(): + return AlarmControlPanelState.ARMING + if self._area.is_pending(): + return AlarmControlPanelState.PENDING + if self._area.is_part_armed(): + return AlarmControlPanelState.ARMED_HOME + if self._area.is_all_armed(): + return AlarmControlPanelState.ARMED_AWAY + return None + + async def async_alarm_disarm(self, code: str | None = None) -> None: + """Disarm this panel.""" + await self.panel.area_disarm(self._area_id) + + async def async_alarm_arm_home(self, code: str | None = None) -> None: + """Send arm home command.""" + await self.panel.area_arm_part(self._area_id) + + async def async_alarm_arm_away(self, code: str | None = None) -> None: + """Send arm away command.""" + await self.panel.area_arm_all(self._area_id) + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return self.panel.connection_status() + + async def async_added_to_hass(self) -> None: + """Run when entity attached to hass.""" + await super().async_added_to_hass() + self._area.status_observer.attach(self.schedule_update_ha_state) + self.panel.connection_status_observer.attach(self.schedule_update_ha_state) + + async def async_will_remove_from_hass(self) -> None: + """Run when entity removed from hass.""" + await super().async_will_remove_from_hass() + self._area.status_observer.detach(self.schedule_update_ha_state) + self.panel.connection_status_observer.detach(self.schedule_update_ha_state) diff --git a/homeassistant/components/bosch_alarm/config_flow.py b/homeassistant/components/bosch_alarm/config_flow.py new file mode 100644 index 00000000000..e48f2a11944 --- /dev/null +++ b/homeassistant/components/bosch_alarm/config_flow.py @@ -0,0 +1,165 @@ +"""Config flow for Bosch Alarm integration.""" + +from __future__ import annotations + +import asyncio +import logging +import ssl +from typing import Any + +from bosch_alarm_mode2 import Panel +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import ( + 
CONF_CODE, + CONF_HOST, + CONF_MODEL, + CONF_PASSWORD, + CONF_PORT, +) +import homeassistant.helpers.config_validation as cv + +from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + vol.Required(CONF_PORT, default=7700): cv.positive_int, + } +) + +STEP_AUTH_DATA_SCHEMA_SOLUTION = vol.Schema( + { + vol.Required(CONF_USER_CODE): str, + } +) + +STEP_AUTH_DATA_SCHEMA_AMAX = vol.Schema( + { + vol.Required(CONF_INSTALLER_CODE): str, + vol.Required(CONF_PASSWORD): str, + } +) + +STEP_AUTH_DATA_SCHEMA_BG = vol.Schema( + { + vol.Required(CONF_PASSWORD): str, + } +) + +STEP_INIT_DATA_SCHEMA = vol.Schema({vol.Optional(CONF_CODE): str}) + + +async def try_connect( + data: dict[str, Any], load_selector: int = 0 +) -> tuple[str, int | None]: + """Validate the user input allows us to connect. + + Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. + """ + panel = Panel( + host=data[CONF_HOST], + port=data[CONF_PORT], + automation_code=data.get(CONF_PASSWORD), + installer_or_user_code=data.get(CONF_INSTALLER_CODE, data.get(CONF_USER_CODE)), + ) + + try: + await panel.connect(load_selector) + finally: + await panel.disconnect() + + return (panel.model, panel.serial_number) + + +class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Bosch Alarm.""" + + def __init__(self) -> None: + """Init config flow.""" + + self._data: dict[str, Any] = {} + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + + if user_input is not None: + try: + # Use load_selector = 0 to fetch the panel model without authentication. 
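+ # The detected model then determines which credential schema the auth step requests.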
+ (model, serial) = await try_connect(user_input, 0) + except ( + OSError, + ConnectionRefusedError, + ssl.SSLError, + asyncio.exceptions.TimeoutError, + ) as e: + _LOGGER.error("Connection Error: %s", e) + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + self._data = user_input + self._data[CONF_MODEL] = model + return await self.async_step_auth() + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + STEP_USER_DATA_SCHEMA, user_input + ), + errors=errors, + ) + + async def async_step_auth( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the auth step.""" + errors: dict[str, str] = {} + + # Each model variant requires a different authentication flow + if "Solution" in self._data[CONF_MODEL]: + schema = STEP_AUTH_DATA_SCHEMA_SOLUTION + elif "AMAX" in self._data[CONF_MODEL]: + schema = STEP_AUTH_DATA_SCHEMA_AMAX + else: + schema = STEP_AUTH_DATA_SCHEMA_BG + + if user_input is not None: + self._data.update(user_input) + try: + (model, serial_number) = await try_connect( + self._data, Panel.LOAD_EXTENDED_INFO + ) + except (PermissionError, ValueError) as e: + errors["base"] = "invalid_auth" + _LOGGER.error("Authentication Error: %s", e) + except ( + OSError, + ConnectionRefusedError, + ssl.SSLError, + TimeoutError, + ) as e: + _LOGGER.error("Connection Error: %s", e) + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + if serial_number: + await self.async_set_unique_id(str(serial_number)) + self._abort_if_unique_id_configured() + else: + self._async_abort_entries_match({CONF_HOST: self._data[CONF_HOST]}) + return self.async_create_entry(title=f"Bosch {model}", data=self._data) + + return self.async_show_form( + step_id="auth", + data_schema=self.add_suggested_values_to_schema(schema, user_input), + errors=errors, + ) diff --git a/homeassistant/components/bosch_alarm/const.py b/homeassistant/components/bosch_alarm/const.py new file mode 100644 index 00000000000..7205831391c --- /dev/null +++ b/homeassistant/components/bosch_alarm/const.py @@ -0,0 +1,6 @@ +"""Constants for the Bosch Alarm integration.""" + +DOMAIN = "bosch_alarm" +HISTORY_ATTR = "history" +CONF_INSTALLER_CODE = "installer_code" +CONF_USER_CODE = "user_code" diff --git a/homeassistant/components/bosch_alarm/manifest.json b/homeassistant/components/bosch_alarm/manifest.json new file mode 100644 index 00000000000..a54ace71782 --- /dev/null +++ b/homeassistant/components/bosch_alarm/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "bosch_alarm", + "name": "Bosch Alarm", + "codeowners": ["@mag1024", "@sanjay900"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/bosch_alarm", + "integration_type": "device", + "iot_class": "local_push", + "quality_scale": "bronze", + "requirements": ["bosch-alarm-mode2==0.4.3"] +} diff --git a/homeassistant/components/bosch_alarm/quality_scale.yaml b/homeassistant/components/bosch_alarm/quality_scale.yaml new file mode 100644 index 00000000000..467760fb863 --- /dev/null +++ b/homeassistant/components/bosch_alarm/quality_scale.yaml @@ -0,0 +1,84 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No custom actions defined + appropriate-polling: + status: exempt + comment: | + No polling + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + 
dependency-transparency: done + docs-actions: + status: exempt + comment: | + No custom actions are defined. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + No custom actions are defined. + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: done + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: todo + test-coverage: done + + # Gold + devices: done + diagnostics: todo + discovery-update-info: todo + discovery: todo + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + Device type integration + entity-category: todo + entity-device-class: todo + entity-disabled-by-default: todo + entity-translations: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + No repairs + stale-devices: + status: exempt + comment: | + Device type integration + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + Integration does not make any HTTP requests. + strict-typing: done diff --git a/homeassistant/components/bosch_alarm/strings.json b/homeassistant/components/bosch_alarm/strings.json new file mode 100644 index 00000000000..f4846021b55 --- /dev/null +++ b/homeassistant/components/bosch_alarm/strings.json @@ -0,0 +1,36 @@ +{ + "config": { + "step": { + "user": { + "data": { + "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of your Bosch alarm panel", + "port": "The port used to connect to your Bosch alarm panel. This is usually 7700" + } + }, + "auth": { + "data": { + "password": "[%key:common::config_flow::data::password%]", + "installer_code": "Installer code", + "user_code": "User code" + }, + "data_description": { + "password": "The Mode 2 automation code from your panel", + "installer_code": "The installer code from your panel", + "user_code": "The user code from your panel" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + } +} diff --git a/homeassistant/components/buienradar/sensor.py b/homeassistant/components/buienradar/sensor.py index a4d39ea07cc..586543de129 100644 --- a/homeassistant/components/buienradar/sensor.py +++ b/homeassistant/components/buienradar/sensor.py @@ -170,6 +170,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( native_unit_of_measurement=DEGREE, icon="mdi:compass-outline", device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), SensorEntityDescription( key="pressure", diff --git a/homeassistant/components/cambridge_audio/media_player.py b/homeassistant/components/cambridge_audio/media_player.py index d18898fa916..5322ae7d9a2 100644 --- a/homeassistant/components/cambridge_audio/media_player.py +++ b/homeassistant/components/cambridge_audio/media_player.py @@ -142,6 +142,12 @@ class CambridgeAudioDevice(CambridgeAudioEntity, MediaPlayerEntity): @property def media_artist(self) -> str | None: """Artist of current playing media, music track only.""" + if ( + not self.client.play_state.metadata.artist + and self.client.state.source == "IR" + ): + # Return channel instead of artist when playing internet radio + return self.client.play_state.metadata.station return self.client.play_state.metadata.artist @property @@ -169,6 +175,11 @@ class CambridgeAudioDevice(CambridgeAudioEntity, MediaPlayerEntity): """Last time the media position was updated.""" return self.client.position_last_updated + @property + def media_channel(self) -> str | None: + """Channel currently playing.""" + return self.client.play_state.metadata.station + @property def is_volume_muted(self) -> bool | None: """Volume mute status.""" diff --git a/homeassistant/components/cast/helpers.py b/homeassistant/components/cast/helpers.py index 7f46100afca..c45bbb4fbbc 100644 --- a/homeassistant/components/cast/helpers.py +++ b/homeassistant/components/cast/helpers.py @@ -81,7 +81,7 @@ class ChromecastInfo: "+label%3A%22integration%3A+cast%22" ) - _LOGGER.debug( + _LOGGER.info( ( "Fetched cast details for unknown model '%s' manufacturer:" " '%s', type: '%s'. 
Please %s" diff --git a/homeassistant/components/cast/manifest.json b/homeassistant/components/cast/manifest.json index feb613f4765..6c8b0536e2f 100644 --- a/homeassistant/components/cast/manifest.json +++ b/homeassistant/components/cast/manifest.json @@ -14,7 +14,7 @@ "documentation": "https://www.home-assistant.io/integrations/cast", "iot_class": "local_polling", "loggers": ["casttube", "pychromecast"], - "requirements": ["PyChromecast==14.0.6"], + "requirements": ["PyChromecast==14.0.7"], "single_config_entry": true, "zeroconf": ["_googlecast._tcp.local."] } diff --git a/homeassistant/components/chacon_dio/config_flow.py b/homeassistant/components/chacon_dio/config_flow.py index 54604b81153..daaf38e0edc 100644 --- a/homeassistant/components/chacon_dio/config_flow.py +++ b/homeassistant/components/chacon_dio/config_flow.py @@ -44,7 +44,7 @@ class ChaconDioConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except DIOChaconInvalidAuthError: errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" diff --git a/homeassistant/components/climate/strings.json b/homeassistant/components/climate/strings.json index 6d8b2c5449d..609eee71139 100644 --- a/homeassistant/components/climate/strings.json +++ b/homeassistant/components/climate/strings.json @@ -257,7 +257,7 @@ "selector": { "hvac_mode": { "options": { - "off": "Off", + "off": "[%key:common::state::off%]", "auto": "Auto", "cool": "Cool", "dry": "Dry", diff --git a/homeassistant/components/cloud/backup.py b/homeassistant/components/cloud/backup.py index b83c4725663..f4426eabeed 100644 --- a/homeassistant/components/cloud/backup.py +++ b/homeassistant/components/cloud/backup.py @@ -4,13 +4,14 @@ from __future__ import annotations import asyncio from collections.abc import AsyncIterator, Callable, Coroutine, Mapping +from http import HTTPStatus import logging import random from typing import Any -from aiohttp import ClientError +from aiohttp import ClientError, ClientResponseError from hass_nabucasa import Cloud, CloudError -from hass_nabucasa.api import CloudApiNonRetryableError +from hass_nabucasa.api import CloudApiError, CloudApiNonRetryableError from hass_nabucasa.cloud_api import ( FilesHandlerListEntry, async_files_delete_file, @@ -120,6 +121,8 @@ class CloudBackupAgent(BackupAgent): """ if not backup.protected: raise BackupAgentError("Cloud backups must be protected") + if self._cloud.subscription_expired: + raise BackupAgentError("Cloud subscription has expired") size = backup.size try: @@ -152,6 +155,13 @@ class CloudBackupAgent(BackupAgent): ) from err raise BackupAgentError(f"Failed to upload backup {err}") from err except CloudError as err: + if ( + isinstance(err, CloudApiError) + and isinstance(err.orig_exc, ClientResponseError) + and err.orig_exc.status == HTTPStatus.FORBIDDEN + and self._cloud.subscription_expired + ): + raise BackupAgentError("Cloud subscription has expired") from err if tries == _RETRY_LIMIT: raise BackupAgentError(f"Failed to upload backup {err}") from err tries += 1 diff --git a/homeassistant/components/cloudflare/config_flow.py b/homeassistant/components/cloudflare/config_flow.py index c3845a447e4..1fad38c5afc 100644 --- a/homeassistant/components/cloudflare/config_flow.py +++ b/homeassistant/components/cloudflare/config_flow.py @@ -9,7 +9,6 @@ from typing import Any import pycfdns import voluptuous as vol -from homeassistant.components import persistent_notification 
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_TOKEN, CONF_ZONE from homeassistant.core import HomeAssistant @@ -118,8 +117,6 @@ class CloudflareConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - persistent_notification.async_dismiss(self.hass, "cloudflare_setup") - errors: dict[str, str] = {} if user_input is not None: diff --git a/homeassistant/components/cloudflare/strings.json b/homeassistant/components/cloudflare/strings.json index 8c8ec57b074..453135f47a0 100644 --- a/homeassistant/components/cloudflare/strings.json +++ b/homeassistant/components/cloudflare/strings.json @@ -4,19 +4,19 @@ "step": { "user": { "title": "Connect to Cloudflare", - "description": "This integration requires an API Token created with Zone:Zone:Read and Zone:DNS:Edit permissions for all zones in your account.", + "description": "This integration requires an API token created with Zone:Zone:Read and Zone:DNS:Edit permissions for all zones in your account.", "data": { "api_token": "[%key:common::config_flow::data::api_token%]" } }, "zone": { - "title": "Choose the Zone to Update", + "title": "Choose the zone to update", "data": { "zone": "Zone" } }, "records": { - "title": "Choose the Records to Update", + "title": "Choose the records to update", "data": { "records": "Records" } @@ -40,7 +40,7 @@ "services": { "update_records": { "name": "Update records", - "description": "Manually trigger update to Cloudflare records." + "description": "Manually triggers an update of Cloudflare records." } } } diff --git a/homeassistant/components/comelit/alarm_control_panel.py b/homeassistant/components/comelit/alarm_control_panel.py index 5ecc9a63599..1ad26905dd1 100644 --- a/homeassistant/components/comelit/alarm_control_panel.py +++ b/homeassistant/components/comelit/alarm_control_panel.py @@ -41,6 +41,7 @@ ALARM_ACTIONS: dict[str, str] = { ALARM_AREA_ARMED_STATUS: dict[str, int] = { + DISABLE: 0, HOME_P1: 1, HOME_P2: 2, NIGHT: 3, @@ -128,20 +129,38 @@ class ComelitAlarmEntity(CoordinatorEntity[ComelitVedoSystem], AlarmControlPanel AlarmAreaState.TRIGGERED: AlarmControlPanelState.TRIGGERED, }.get(self._area.human_status) + async def _async_update_state(self, area_state: AlarmAreaState, armed: int) -> None: + """Update state after action.""" + self._area.human_status = area_state + self._area.armed = armed + await self.async_update_ha_state() + async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" if code != str(self._api.device_pin): return await self._api.set_zone_status(self._area.index, ALARM_ACTIONS[DISABLE]) + await self._async_update_state( + AlarmAreaState.DISARMED, ALARM_AREA_ARMED_STATUS[DISABLE] + ) async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" await self._api.set_zone_status(self._area.index, ALARM_ACTIONS[AWAY]) + await self._async_update_state( + AlarmAreaState.ARMED, ALARM_AREA_ARMED_STATUS[AWAY] + ) async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" await self._api.set_zone_status(self._area.index, ALARM_ACTIONS[HOME]) + await self._async_update_state( + AlarmAreaState.ARMED, ALARM_AREA_ARMED_STATUS[HOME_P1] + ) async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send arm night command.""" await self._api.set_zone_status(self._area.index, ALARM_ACTIONS[NIGHT]) + await 
self._async_update_state( + AlarmAreaState.ARMED, ALARM_AREA_ARMED_STATUS[NIGHT] + ) diff --git a/homeassistant/components/comelit/const.py b/homeassistant/components/comelit/const.py index 84d8fbd6315..f52f33fd6da 100644 --- a/homeassistant/components/comelit/const.py +++ b/homeassistant/components/comelit/const.py @@ -9,3 +9,5 @@ _LOGGER = logging.getLogger(__package__) DOMAIN = "comelit" DEFAULT_PORT = 80 DEVICE_TYPE_LIST = [BRIDGE, VEDO] + +SCAN_INTERVAL = 5 diff --git a/homeassistant/components/comelit/coordinator.py b/homeassistant/components/comelit/coordinator.py index b3be3a47825..df4965d9945 100644 --- a/homeassistant/components/comelit/coordinator.py +++ b/homeassistant/components/comelit/coordinator.py @@ -22,7 +22,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import _LOGGER, DOMAIN +from .const import _LOGGER, DOMAIN, SCAN_INTERVAL type ComelitConfigEntry = ConfigEntry[ComelitBaseCoordinator] @@ -53,7 +53,7 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]): logger=_LOGGER, config_entry=entry, name=f"{DOMAIN}-{host}-coordinator", - update_interval=timedelta(seconds=5), + update_interval=timedelta(seconds=SCAN_INTERVAL), ) device_registry = dr.async_get(self.hass) device_registry.async_get_or_create( diff --git a/homeassistant/components/comelit/cover.py b/homeassistant/components/comelit/cover.py index 9bcf52ac111..befcb0c35d4 100644 --- a/homeassistant/components/comelit/cover.py +++ b/homeassistant/components/comelit/cover.py @@ -8,7 +8,7 @@ from aiocomelit import ComelitSerialBridgeObject from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON from homeassistant.components.cover import CoverDeviceClass, CoverEntity, CoverState -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -98,13 +98,20 @@ class ComelitCoverEntity( """Return if the cover is opening.""" return self._current_action("opening") + async def _cover_set_state(self, action: int, state: int) -> None: + """Set desired cover state.""" + self._last_state = self.state + await self._api.set_device_status(COVER, self._device.index, action) + self.coordinator.data[COVER][self._device.index].status = state + self.async_write_ha_state() + async def async_close_cover(self, **kwargs: Any) -> None: """Close cover.""" - await self._api.set_device_status(COVER, self._device.index, STATE_OFF) + await self._cover_set_state(STATE_OFF, 2) async def async_open_cover(self, **kwargs: Any) -> None: """Open cover.""" - await self._api.set_device_status(COVER, self._device.index, STATE_ON) + await self._cover_set_state(STATE_ON, 1) async def async_stop_cover(self, **_kwargs: Any) -> None: """Stop the cover.""" @@ -112,13 +119,7 @@ class ComelitCoverEntity( return action = STATE_ON if self.is_closing else STATE_OFF - await self._api.set_device_status(COVER, self._device.index, action) - - @callback - def _handle_coordinator_update(self) -> None: - """Handle device update.""" - self._last_state = self.state - self.async_write_ha_state() + await self._cover_set_state(action, 0) async def async_added_to_hass(self) -> None: """Handle entity which will be added.""" diff --git 
a/homeassistant/components/comelit/light.py b/homeassistant/components/comelit/light.py index 09180d628a6..53cf6bdcb46 100644 --- a/homeassistant/components/comelit/light.py +++ b/homeassistant/components/comelit/light.py @@ -59,7 +59,8 @@ class ComelitLightEntity(CoordinatorEntity[ComelitSerialBridge], LightEntity): async def _light_set_state(self, state: int) -> None: """Set desired light state.""" await self.coordinator.api.set_device_status(LIGHT, self._device.index, state) - await self.coordinator.async_request_refresh() + self.coordinator.data[LIGHT][self._device.index].status = state + self.async_write_ha_state() async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" diff --git a/homeassistant/components/comelit/manifest.json b/homeassistant/components/comelit/manifest.json index 8836af4e8dd..3abfc222e7d 100644 --- a/homeassistant/components/comelit/manifest.json +++ b/homeassistant/components/comelit/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aiocomelit"], - "requirements": ["aiocomelit==0.11.2"] + "requirements": ["aiocomelit==0.11.3"] } diff --git a/homeassistant/components/comelit/switch.py b/homeassistant/components/comelit/switch.py index db89bd082f6..2c751cbe2cb 100644 --- a/homeassistant/components/comelit/switch.py +++ b/homeassistant/components/comelit/switch.py @@ -67,7 +67,8 @@ class ComelitSwitchEntity(CoordinatorEntity[ComelitSerialBridge], SwitchEntity): await self.coordinator.api.set_device_status( self._device.type, self._device.index, state ) - await self.coordinator.async_request_refresh() + self.coordinator.data[self._device.type][self._device.index].status = state + self.async_write_ha_state() async def async_turn_on(self, **kwargs: Any) -> None: """Turn the switch on.""" diff --git a/homeassistant/components/constructa/__init__.py b/homeassistant/components/constructa/__init__.py new file mode 100644 index 00000000000..1b3870860a0 --- /dev/null +++ b/homeassistant/components/constructa/__init__.py @@ -0,0 +1 @@ +"""Constructa virtual integration.""" diff --git a/homeassistant/components/constructa/manifest.json b/homeassistant/components/constructa/manifest.json new file mode 100644 index 00000000000..7b73f2e2ed0 --- /dev/null +++ b/homeassistant/components/constructa/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "constructa", + "name": "Constructa", + "integration_type": "virtual", + "supported_by": "home_connect" +} diff --git a/homeassistant/components/conversation/__init__.py b/homeassistant/components/conversation/__init__.py index 14c5244c18b..25aaf6df290 100644 --- a/homeassistant/components/conversation/__init__.py +++ b/homeassistant/components/conversation/__init__.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Callable import logging -import re from typing import Literal from hassil.recognize import RecognizeResult @@ -91,8 +90,6 @@ __all__ = [ _LOGGER = logging.getLogger(__name__) -REGEX_TYPE = type(re.compile("")) - SERVICE_PROCESS_SCHEMA = vol.Schema( { vol.Required(ATTR_TEXT): cv.string, diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index c30e8bb4a92..bed4b4c0dd6 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -650,7 +650,14 @@ class DefaultAgent(ConversationEntity): if ( (maybe_result is None) # first result - or (num_matched_entities > best_num_matched_entities) + or ( + 
# More literal text matched + result.text_chunks_matched > maybe_result.text_chunks_matched + ) + or ( + # More entities matched + num_matched_entities > best_num_matched_entities + ) or ( # Fewer unmatched entities (num_matched_entities == best_num_matched_entities) @@ -662,16 +669,6 @@ class DefaultAgent(ConversationEntity): and (num_unmatched_entities == best_num_unmatched_entities) and (num_unmatched_ranges > best_num_unmatched_ranges) ) - or ( - # More literal text matched - (num_matched_entities == best_num_matched_entities) - and (num_unmatched_entities == best_num_unmatched_entities) - and (num_unmatched_ranges == best_num_unmatched_ranges) - and ( - result.text_chunks_matched - > maybe_result.text_chunks_matched - ) - ) or ( # Prefer match failures with entities (result.text_chunks_matched == maybe_result.text_chunks_matched) diff --git a/homeassistant/components/conversation/http.py b/homeassistant/components/conversation/http.py index 4d8526a4fd4..efcdcb8d69b 100644 --- a/homeassistant/components/conversation/http.py +++ b/homeassistant/components/conversation/http.py @@ -3,11 +3,13 @@ from __future__ import annotations from collections.abc import Iterable +from dataclasses import asdict from typing import Any from aiohttp import web from hassil.recognize import MISSING_ENTITY, RecognizeResult from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity +from home_assistant_intents import get_language_scores import voluptuous as vol from homeassistant.components import http, websocket_api @@ -38,6 +40,7 @@ def async_setup(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_list_agents) websocket_api.async_register_command(hass, websocket_list_sentences) websocket_api.async_register_command(hass, websocket_hass_agent_debug) + websocket_api.async_register_command(hass, websocket_hass_agent_language_scores) @websocket_api.websocket_command( @@ -336,6 +339,36 @@ def _get_unmatched_slots( return unmatched_slots +@websocket_api.websocket_command( + { + vol.Required("type"): "conversation/agent/homeassistant/language_scores", + vol.Optional("language"): str, + vol.Optional("country"): str, + } +) +@websocket_api.async_response +async def websocket_hass_agent_language_scores( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Get support scores per language.""" + language = msg.get("language", hass.config.language) + country = msg.get("country", hass.config.country) + + scores = await hass.async_add_executor_job(get_language_scores) + matching_langs = language_util.matches(language, scores.keys(), country=country) + preferred_lang = matching_langs[0] if matching_langs else language + result = { + "languages": { + lang_key: asdict(lang_scores) for lang_key, lang_scores in scores.items() + }, + "preferred_language": preferred_lang, + } + + connection.send_result(msg["id"], result) + + class ConversationProcessView(http.HomeAssistantView): """View to process text.""" diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 56d5e28e642..a1281764bd5 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.3.23"] + "requirements": 
["hassil==2.2.3", "home-assistant-intents==2025.3.28"] } diff --git a/homeassistant/components/conversation/util.py b/homeassistant/components/conversation/util.py deleted file mode 100644 index 4326c95cb66..00000000000 --- a/homeassistant/components/conversation/util.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Util for Conversation.""" - -from __future__ import annotations - -import re - - -def create_matcher(utterance: str) -> re.Pattern[str]: - """Create a regex that matches the utterance.""" - # Split utterance into parts that are type: NORMAL, GROUP or OPTIONAL - # Pattern matches (GROUP|OPTIONAL): Change light to [the color] {name} - parts = re.split(r"({\w+}|\[[\w\s]+\] *)", utterance) - # Pattern to extract name from GROUP part. Matches {name} - group_matcher = re.compile(r"{(\w+)}") - # Pattern to extract text from OPTIONAL part. Matches [the color] - optional_matcher = re.compile(r"\[([\w ]+)\] *") - - pattern = ["^"] - for part in parts: - group_match = group_matcher.match(part) - optional_match = optional_matcher.match(part) - - # Normal part - if group_match is None and optional_match is None: - pattern.append(part) - continue - - # Group part - if group_match is not None: - pattern.append(rf"(?P<{group_match.groups()[0]}>[\w ]+?)\s*") - - # Optional part - elif optional_match is not None: - pattern.append(rf"(?:{optional_match.groups()[0]} *)?") - - pattern.append("$") - return re.compile("".join(pattern), re.IGNORECASE) diff --git a/homeassistant/components/cookidoo/strings.json b/homeassistant/components/cookidoo/strings.json index ae384fb6635..52f99133546 100644 --- a/homeassistant/components/cookidoo/strings.json +++ b/homeassistant/components/cookidoo/strings.json @@ -6,7 +6,7 @@ "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]", - "country": "Country" + "country": "[%key:common::config_flow::data::country%]" }, "data_description": { "email": "Email used to access your {cookidoo} account.", diff --git a/homeassistant/components/cover/strings.json b/homeassistant/components/cover/strings.json index 0afef8a200f..6ca8b50620f 100644 --- a/homeassistant/components/cover/strings.json +++ b/homeassistant/components/cover/strings.json @@ -38,10 +38,10 @@ "name": "[%key:component::cover::title%]", "state": { "open": "[%key:common::state::open%]", - "opening": "Opening", + "opening": "[%key:common::state::opening%]", "closed": "[%key:common::state::closed%]", - "closing": "Closing", - "stopped": "Stopped" + "closing": "[%key:common::state::closing%]", + "stopped": "[%key:common::state::stopped%]" }, "state_attributes": { "current_position": { diff --git a/homeassistant/components/deluge/config_flow.py b/homeassistant/components/deluge/config_flow.py index 19afe26e8f9..78eced64c7c 100644 --- a/homeassistant/components/deluge/config_flow.py +++ b/homeassistant/components/deluge/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from ssl import SSLError from typing import Any @@ -21,6 +22,8 @@ from .const import ( DOMAIN, ) +_LOGGER = logging.getLogger(__name__) + class DelugeFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Deluge.""" @@ -86,7 +89,8 @@ class DelugeFlowHandler(ConfigFlow, domain=DOMAIN): await self.hass.async_add_executor_job(api.connect) except (ConnectionRefusedError, TimeoutError, SSLError): return "cannot_connect" - except Exception as ex: # noqa: BLE001 + except Exception as ex: + _LOGGER.exception("Unexpected 
error") if type(ex).__name__ == "BadLoginError": return "invalid_auth" return "unknown" diff --git a/homeassistant/components/dexcom/config_flow.py b/homeassistant/components/dexcom/config_flow.py index 90917e0ce2c..ed6dc94e764 100644 --- a/homeassistant/components/dexcom/config_flow.py +++ b/homeassistant/components/dexcom/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any from pydexcom import AccountError, Dexcom, SessionError @@ -12,6 +13,8 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import CONF_SERVER, DOMAIN, SERVER_OUS, SERVER_US +_LOGGER = logging.getLogger(__name__) + DATA_SCHEMA = vol.Schema( { vol.Required(CONF_USERNAME): str, @@ -43,7 +46,8 @@ class DexcomConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except AccountError: errors["base"] = "invalid_auth" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected error") errors["base"] = "unknown" if "base" not in errors: diff --git a/homeassistant/components/discovergy/__init__.py b/homeassistant/components/discovergy/__init__.py index 9cf63176de6..0a8b7422f84 100644 --- a/homeassistant/components/discovergy/__init__.py +++ b/homeassistant/components/discovergy/__init__.py @@ -9,7 +9,7 @@ import pydiscovergy.error as discovergyError from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.httpx_client import create_async_httpx_client from .coordinator import DiscovergyConfigEntry, DiscovergyUpdateCoordinator @@ -21,7 +21,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DiscovergyConfigEntry) - client = Discovergy( email=entry.data[CONF_EMAIL], password=entry.data[CONF_PASSWORD], - httpx_client=get_async_client(hass), + httpx_client=create_async_httpx_client(hass), authentication=BasicAuth(), ) diff --git a/homeassistant/components/duke_energy/config_flow.py b/homeassistant/components/duke_energy/config_flow.py index e06940b0fba..2ec92ff4c12 100644 --- a/homeassistant/components/duke_energy/config_flow.py +++ b/homeassistant/components/duke_energy/config_flow.py @@ -50,10 +50,10 @@ class DukeEnergyConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - username = auth["cdp_internal_user_id"].lower() + username = auth["internalUserID"].lower() await self.async_set_unique_id(username) self._abort_if_unique_id_configured() - email = auth["email"].lower() + email = auth["loginEmailAddress"].lower() data = { CONF_EMAIL: email, CONF_USERNAME: username, diff --git a/homeassistant/components/duke_energy/coordinator.py b/homeassistant/components/duke_energy/coordinator.py index 12a2f5fd6ae..a76168475c0 100644 --- a/homeassistant/components/duke_energy/coordinator.py +++ b/homeassistant/components/duke_energy/coordinator.py @@ -8,7 +8,11 @@ from aiodukeenergy import DukeEnergy from aiohttp import ClientError from homeassistant.components.recorder import get_instance -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -137,7 +141,7 @@ class 
DukeEnergyCoordinator(DataUpdateCoordinator[None]): f"Duke Energy {meter['serviceType'].capitalize()} {serial_number}" ) consumption_metadata = StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{name_prefix} Consumption", source=DOMAIN, diff --git a/homeassistant/components/duke_energy/manifest.json b/homeassistant/components/duke_energy/manifest.json index ece18d7ad2a..ad64fdd5cc4 100644 --- a/homeassistant/components/duke_energy/manifest.json +++ b/homeassistant/components/duke_energy/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["recorder"], "documentation": "https://www.home-assistant.io/integrations/duke_energy", "iot_class": "cloud_polling", - "requirements": ["aiodukeenergy==0.2.2"] + "requirements": ["aiodukeenergy==0.3.0"] } diff --git a/homeassistant/components/ecoforest/sensor.py b/homeassistant/components/ecoforest/sensor.py index c1d4aca6f0c..d0e4c17abe1 100644 --- a/homeassistant/components/ecoforest/sensor.py +++ b/homeassistant/components/ecoforest/sensor.py @@ -132,7 +132,7 @@ SENSOR_TYPES: tuple[EcoforestSensorEntityDescription, ...] = ( ), EcoforestSensorEntityDescription( key="convecto_air_flow", - translation_key="convecto_air_flow", + translation_key="convector_air_flow", native_unit_of_measurement=PERCENTAGE, entity_registry_enabled_default=False, value_fn=lambda data: data.convecto_air_flow, diff --git a/homeassistant/components/ecoforest/strings.json b/homeassistant/components/ecoforest/strings.json index 1094e10ada3..d0e807b5f2a 100644 --- a/homeassistant/components/ecoforest/strings.json +++ b/homeassistant/components/ecoforest/strings.json @@ -78,8 +78,8 @@ "extractor": { "name": "Extractor" }, - "convecto_air_flow": { - "name": "Convecto air flow" + "convector_air_flow": { + "name": "Convector air flow" } }, "number": { diff --git a/homeassistant/components/econet/water_heater.py b/homeassistant/components/econet/water_heater.py index fb74ae8b4a5..f93ad7f8872 100644 --- a/homeassistant/components/econet/water_heater.py +++ b/homeassistant/components/econet/water_heater.py @@ -91,15 +91,15 @@ class EcoNetWaterHeater(EcoNetEntity[WaterHeater], WaterHeaterEntity): def operation_list(self) -> list[str]: """List of available operation modes.""" econet_modes = self.water_heater.modes - op_list = [] + operation_modes = set() for mode in econet_modes: if ( mode is not WaterHeaterOperationMode.UNKNOWN and mode is not WaterHeaterOperationMode.VACATION ): ha_mode = ECONET_STATE_TO_HA[mode] - op_list.append(ha_mode) - return op_list + operation_modes.add(ha_mode) + return list(operation_modes) @property def supported_features(self) -> WaterHeaterEntityFeature: diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 6d3dc5c9be6..acb5b620719 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==12.3.1"] + "requirements": ["py-sucks==0.9.10", "deebot-client==12.4.0"] } diff --git a/homeassistant/components/ecovacs/strings.json b/homeassistant/components/ecovacs/strings.json index 44c51c7ae43..515eb1c3141 100644 --- a/homeassistant/components/ecovacs/strings.json +++ b/homeassistant/components/ecovacs/strings.json @@ -14,7 +14,7 @@ "step": { "auth": { "data": { - "country": "Country", + "country": 
"[%key:common::config_flow::data::country%]", "override_rest_url": "REST URL", "override_mqtt_url": "MQTT URL", "password": "[%key:common::config_flow::data::password%]", diff --git a/homeassistant/components/ecowitt/sensor.py b/homeassistant/components/ecowitt/sensor.py index 6968acdfa4f..7d37aa40b86 100644 --- a/homeassistant/components/ecowitt/sensor.py +++ b/homeassistant/components/ecowitt/sensor.py @@ -68,6 +68,7 @@ ECOWITT_SENSORS_MAPPING: Final = { key="DEGREE", native_unit_of_measurement=DEGREE, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), EcoWittSensorTypes.WATT_METERS_SQUARED: SensorEntityDescription( key="WATT_METERS_SQUARED", diff --git a/homeassistant/components/eheimdigital/config_flow.py b/homeassistant/components/eheimdigital/config_flow.py index c6535608b0c..b0432267c8e 100644 --- a/homeassistant/components/eheimdigital/config_flow.py +++ b/homeassistant/components/eheimdigital/config_flow.py @@ -62,6 +62,7 @@ class EheimDigitalConfigFlow(ConfigFlow, domain=DOMAIN): except (ClientError, TimeoutError): return self.async_abort(reason="cannot_connect") except Exception: # noqa: BLE001 + LOGGER.exception("Unknown exception occurred") return self.async_abort(reason="unknown") await self.async_set_unique_id(hub.main.mac_address) self._abort_if_unique_id_configured(updates={CONF_HOST: host}) diff --git a/homeassistant/components/elvia/importer.py b/homeassistant/components/elvia/importer.py index 4e8b7f716ef..caca787237c 100644 --- a/homeassistant/components/elvia/importer.py +++ b/homeassistant/components/elvia/importer.py @@ -7,7 +7,11 @@ from typing import TYPE_CHECKING, cast from elvia import Elvia, error as ElviaError -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -144,7 +148,7 @@ class ElviaImporter: async_add_external_statistics( hass=self.hass, metadata=StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{self.metering_point_id} Consumption", source=DOMAIN, diff --git a/homeassistant/components/emulated_roku/__init__.py b/homeassistant/components/emulated_roku/__init__.py index d4466f47ef2..e8c3a00f098 100644 --- a/homeassistant/components/emulated_roku/__init__.py +++ b/homeassistant/components/emulated_roku/__init__.py @@ -46,6 +46,8 @@ CONFIG_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) +type EmulatedRokuConfigEntry = ConfigEntry[EmulatedRoku] + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the emulated roku component.""" @@ -65,22 +67,21 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, entry: EmulatedRokuConfigEntry +) -> bool: """Set up an emulated roku server from a config entry.""" - config = config_entry.data - - if DOMAIN not in hass.data: - hass.data[DOMAIN] = {} - - name = config[CONF_NAME] - listen_port = config[CONF_LISTEN_PORT] - host_ip = config.get(CONF_HOST_IP) or await async_get_source_ip(hass) - advertise_ip = config.get(CONF_ADVERTISE_IP) - advertise_port = config.get(CONF_ADVERTISE_PORT) - upnp_bind_multicast = config.get(CONF_UPNP_BIND_MULTICAST) + config = entry.data + name: 
str = config[CONF_NAME] + listen_port: int = config[CONF_LISTEN_PORT] + host_ip: str = config.get(CONF_HOST_IP) or await async_get_source_ip(hass) + advertise_ip: str | None = config.get(CONF_ADVERTISE_IP) + advertise_port: int | None = config.get(CONF_ADVERTISE_PORT) + upnp_bind_multicast: bool | None = config.get(CONF_UPNP_BIND_MULTICAST) server = EmulatedRoku( hass, + entry.entry_id, name, host_ip, listen_port, @@ -88,14 +89,12 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b advertise_port, upnp_bind_multicast, ) - - hass.data[DOMAIN][name] = server - + entry.runtime_data = server return await server.setup() -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: EmulatedRokuConfigEntry +) -> bool: """Unload a config entry.""" - name = entry.data[CONF_NAME] - server = hass.data[DOMAIN].pop(name) - return await server.unload() + return await entry.runtime_data.unload() diff --git a/homeassistant/components/emulated_roku/binding.py b/homeassistant/components/emulated_roku/binding.py index a84db4bd77b..6d8d9c4014f 100644 --- a/homeassistant/components/emulated_roku/binding.py +++ b/homeassistant/components/emulated_roku/binding.py @@ -5,7 +5,13 @@ import logging from emulated_roku import EmulatedRokuCommandHandler, EmulatedRokuServer from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP -from homeassistant.core import CoreState, EventOrigin +from homeassistant.core import ( + CALLBACK_TYPE, + CoreState, + Event, + EventOrigin, + HomeAssistant, +) LOGGER = logging.getLogger(__package__) @@ -27,16 +33,18 @@ class EmulatedRoku: def __init__( self, - hass, - name, - host_ip, - listen_port, - advertise_ip, - advertise_port, - upnp_bind_multicast, - ): + hass: HomeAssistant, + entry_id: str, + name: str, + host_ip: str, + listen_port: int, + advertise_ip: str | None, + advertise_port: int | None, + upnp_bind_multicast: bool | None, + ) -> None: """Initialize the properties.""" self.hass = hass + self.entry_id = entry_id self.roku_usn = name self.host_ip = host_ip @@ -47,21 +55,21 @@ class EmulatedRoku: self.bind_multicast = upnp_bind_multicast - self._api_server = None + self._api_server: EmulatedRokuServer | None = None - self._unsub_start_listener = None - self._unsub_stop_listener = None + self._unsub_start_listener: CALLBACK_TYPE | None = None + self._unsub_stop_listener: CALLBACK_TYPE | None = None - async def setup(self): + async def setup(self) -> bool: """Start the emulated_roku server.""" class EventCommandHandler(EmulatedRokuCommandHandler): """emulated_roku command handler to turn commands into events.""" - def __init__(self, hass): + def __init__(self, hass: HomeAssistant) -> None: self.hass = hass - def on_keydown(self, roku_usn, key): + def on_keydown(self, roku_usn: str, key: str) -> None: """Handle keydown event.""" self.hass.bus.async_fire( EVENT_ROKU_COMMAND, @@ -73,7 +81,7 @@ class EmulatedRoku: EventOrigin.local, ) - def on_keyup(self, roku_usn, key): + def on_keyup(self, roku_usn: str, key: str) -> None: """Handle keyup event.""" self.hass.bus.async_fire( EVENT_ROKU_COMMAND, @@ -85,7 +93,7 @@ class EmulatedRoku: EventOrigin.local, ) - def on_keypress(self, roku_usn, key): + def on_keypress(self, roku_usn: str, key: str) -> None: """Handle keypress event.""" self.hass.bus.async_fire( EVENT_ROKU_COMMAND, @@ -97,7 +105,7 @@ class EmulatedRoku: EventOrigin.local, ) - def launch(self, roku_usn, app_id): + def launch(self, roku_usn: 
str, app_id: str) -> None: """Handle launch event.""" self.hass.bus.async_fire( EVENT_ROKU_COMMAND, @@ -129,17 +137,19 @@ class EmulatedRoku: bind_multicast=self.bind_multicast, ) - async def emulated_roku_stop(event): + async def emulated_roku_stop(event: Event | None) -> None: """Wrap the call to emulated_roku.close.""" LOGGER.debug("Stopping emulated_roku %s", self.roku_usn) self._unsub_stop_listener = None + assert self._api_server is not None await self._api_server.close() - async def emulated_roku_start(event): + async def emulated_roku_start(event: Event | None) -> None: """Wrap the call to emulated_roku.start.""" try: LOGGER.debug("Starting emulated_roku %s", self.roku_usn) self._unsub_start_listener = None + assert self._api_server is not None await self._api_server.start() except OSError: LOGGER.exception( @@ -165,7 +175,7 @@ class EmulatedRoku: return True - async def unload(self): + async def unload(self) -> bool: """Unload the emulated_roku server.""" LOGGER.debug("Unloading emulated_roku %s", self.roku_usn) @@ -177,6 +187,7 @@ class EmulatedRoku: self._unsub_stop_listener() self._unsub_stop_listener = None + assert self._api_server is not None await self._api_server.close() return True diff --git a/homeassistant/components/energy/data.py b/homeassistant/components/energy/data.py index ff86177cf41..442aedf23b0 100644 --- a/homeassistant/components/energy/data.py +++ b/homeassistant/components/energy/data.py @@ -139,6 +139,10 @@ class DeviceConsumption(TypedDict): # An optional custom name for display in energy graphs name: str | None + # An optional statistic_id identifying a device + # that includes this device's consumption in its total + included_in_stat: str | None + class EnergyPreferences(TypedDict): """Dictionary holding the energy data.""" @@ -291,6 +295,7 @@ DEVICE_CONSUMPTION_SCHEMA = vol.Schema( { vol.Required("stat_consumption"): str, vol.Optional("name"): str, + vol.Optional("included_in_stat"): str, } ) diff --git a/homeassistant/components/enigma2/config_flow.py b/homeassistant/components/enigma2/config_flow.py index b0649a8368d..876d55128cf 100644 --- a/homeassistant/components/enigma2/config_flow.py +++ b/homeassistant/components/enigma2/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Enigma2.""" +import logging from typing import Any, cast from aiohttp.client_exceptions import ClientError @@ -63,6 +64,8 @@ CONFIG_SCHEMA = vol.Schema( } ) +_LOGGER = logging.getLogger(__name__) + async def get_options_schema(handler: SchemaCommonFlowHandler) -> vol.Schema: """Get the options schema.""" @@ -130,7 +133,8 @@ class Enigma2ConfigFlowHandler(ConfigFlow, domain=DOMAIN): errors = {"base": "invalid_auth"} except ClientError: errors = {"base": "cannot_connect"} - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors = {"base": "unknown"} else: unique_id = about["info"]["ifaces"][0]["mac"] or self.unique_id diff --git a/homeassistant/components/enphase_envoy/strings.json b/homeassistant/components/enphase_envoy/strings.json index b498c59e0d3..ce3a8593226 100644 --- a/homeassistant/components/enphase_envoy/strings.json +++ b/homeassistant/components/enphase_envoy/strings.json @@ -187,13 +187,13 @@ "name": "Lifetime energy consumption {phase_name}" }, "balanced_net_consumption": { - "name": "balanced net power consumption" + "name": "Balanced net power consumption" }, "lifetime_balanced_net_consumption": { "name": "Lifetime balanced net energy consumption" }, "balanced_net_consumption_phase": { - "name": "balanced net 
power consumption {phase_name}" + "name": "Balanced net power consumption {phase_name}" }, "lifetime_balanced_net_consumption_phase": { "name": "Lifetime balanced net energy consumption {phase_name}" @@ -217,7 +217,7 @@ "name": "Net consumption CT current" }, "net_ct_powerfactor": { - "name": "Powerfactor net consumption CT" + "name": "Power factor net consumption CT" }, "net_ct_metering_status": { "name": "Metering status net consumption CT" @@ -235,7 +235,7 @@ "name": "Production CT current" }, "production_ct_powerfactor": { - "name": "powerfactor production CT" + "name": "Power factor production CT" }, "production_ct_metering_status": { "name": "Metering status production CT" @@ -262,7 +262,7 @@ "name": "Storage CT current" }, "storage_ct_powerfactor": { - "name": "Powerfactor storage CT" + "name": "Power factor storage CT" }, "storage_ct_metering_status": { "name": "Metering status storage CT" @@ -289,7 +289,7 @@ "name": "Net consumption CT current {phase_name}" }, "net_ct_powerfactor_phase": { - "name": "Powerfactor net consumption CT {phase_name}" + "name": "Power factor net consumption CT {phase_name}" }, "net_ct_metering_status_phase": { "name": "Metering status net consumption CT {phase_name}" @@ -307,7 +307,7 @@ "name": "Production CT current {phase_name}" }, "production_ct_powerfactor_phase": { - "name": "Powerfactor production CT {phase_name}" + "name": "Power factor production CT {phase_name}" }, "production_ct_metering_status_phase": { "name": "Metering status production CT {phase_name}" @@ -334,7 +334,7 @@ "name": "Storage CT current {phase_name}" }, "storage_ct_powerfactor_phase": { - "name": "Powerfactor storage CT {phase_name}" + "name": "Power factor storage CT {phase_name}" }, "storage_ct_metering_status_phase": { "name": "Metering status storage CT {phase_name}" diff --git a/homeassistant/components/environment_canada/sensor.py b/homeassistant/components/environment_canada/sensor.py index 3a789289c74..1685888d2bc 100644 --- a/homeassistant/components/environment_canada/sensor.py +++ b/homeassistant/components/environment_canada/sensor.py @@ -168,6 +168,7 @@ SENSOR_TYPES: tuple[ECSensorEntityDescription, ...] 
= ( native_unit_of_measurement=DEGREE, value_fn=lambda data: data.conditions.get("wind_bearing", {}).get("value"), device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), ECSensorEntityDescription( key="wind_chill", diff --git a/homeassistant/components/epic_games_store/strings.json b/homeassistant/components/epic_games_store/strings.json index 58a87a55f81..ab4562a72ad 100644 --- a/homeassistant/components/epic_games_store/strings.json +++ b/homeassistant/components/epic_games_store/strings.json @@ -3,8 +3,8 @@ "step": { "user": { "data": { - "language": "Language", - "country": "Country" + "language": "[%key:common::config_flow::data::language%]", + "country": "[%key:common::config_flow::data::country%]" } } }, diff --git a/homeassistant/components/esphome/assist_satellite.py b/homeassistant/components/esphome/assist_satellite.py index fdd16d20d77..9d92b5fcb92 100644 --- a/homeassistant/components/esphome/assist_satellite.py +++ b/homeassistant/components/esphome/assist_satellite.py @@ -253,6 +253,11 @@ class EsphomeAssistSatellite( # Will use media player for TTS/announcements self._update_tts_format() + if feature_flags & VoiceAssistantFeature.START_CONVERSATION: + self._attr_supported_features |= ( + assist_satellite.AssistSatelliteEntityFeature.START_CONVERSATION + ) + # Update wake word select when config is updated self.async_on_remove( self.entry_data.async_register_assist_satellite_set_wake_word_callback( @@ -305,12 +310,13 @@ class EsphomeAssistSatellite( self.entry_data.api_version ) ) - if feature_flags & VoiceAssistantFeature.SPEAKER: - media_id = tts_output["media_id"] + if feature_flags & VoiceAssistantFeature.SPEAKER and ( + stream := tts.async_get_stream(self.hass, tts_output["token"]) + ): self._tts_streaming_task = ( self.config_entry.async_create_background_task( self.hass, - self._stream_tts_audio(media_id), + self._stream_tts_audio(stream), "esphome_voice_assistant_tts", ) ) @@ -342,14 +348,33 @@ class EsphomeAssistSatellite( Should block until the announcement is done playing. """ + await self._do_announce(announcement, run_pipeline_after=False) + + async def async_start_conversation( + self, start_announcement: assist_satellite.AssistSatelliteAnnouncement + ) -> None: + """Start a conversation from the satellite.""" + await self._do_announce(start_announcement, run_pipeline_after=True) + + async def _do_announce( + self, + announcement: assist_satellite.AssistSatelliteAnnouncement, + run_pipeline_after: bool, + ) -> None: + """Announce media on the satellite. + + Optionally run a voice pipeline after the announcement has finished. 
+ """ _LOGGER.debug( "Waiting for announcement to finished (message=%s, media_id=%s)", announcement.message, announcement.media_id, ) media_id = announcement.media_id - if announcement.media_id_source != "tts": - # Route non-TTS media through the proxy + is_media_tts = announcement.media_id_source == "tts" + preannounce_media_id = announcement.preannounce_media_id + if (not is_media_tts) or preannounce_media_id: + # Route media through the proxy format_to_use: MediaPlayerSupportedFormat | None = None for supported_format in chain( *self.entry_data.media_player_formats.values() @@ -362,19 +387,33 @@ class EsphomeAssistSatellite( assert (self.registry_entry is not None) and ( self.registry_entry.device_id is not None ) - proxy_url = async_create_proxy_url( - self.hass, - self.registry_entry.device_id, - media_id, + + make_proxy_url = partial( + async_create_proxy_url, + hass=self.hass, + device_id=self.registry_entry.device_id, media_format=format_to_use.format, rate=format_to_use.sample_rate or None, channels=format_to_use.num_channels or None, width=format_to_use.sample_bytes or None, ) - media_id = async_process_play_media_url(self.hass, proxy_url) + + if not is_media_tts: + media_id = async_process_play_media_url( + self.hass, make_proxy_url(media_url=media_id) + ) + + if preannounce_media_id: + preannounce_media_id = async_process_play_media_url( + self.hass, make_proxy_url(media_url=preannounce_media_id) + ) await self.cli.send_voice_assistant_announcement_await_response( - media_id, _ANNOUNCEMENT_TIMEOUT_SEC, announcement.message + media_id, + _ANNOUNCEMENT_TIMEOUT_SEC, + announcement.message, + start_conversation=run_pipeline_after, + preannounce_media_id=preannounce_media_id or "", ) async def handle_pipeline_start( @@ -526,7 +565,7 @@ class EsphomeAssistSatellite( async def _stream_tts_audio( self, - media_id: str, + tts_result: tts.ResultStream, sample_rate: int = 16000, sample_width: int = 2, sample_channels: int = 1, @@ -541,15 +580,14 @@ class EsphomeAssistSatellite( if not self._is_running: return - extension, data = await tts.async_get_media_source_audio( - self.hass, - media_id, - ) - - if extension != "wav": - _LOGGER.error("Only WAV audio can be streamed, got %s", extension) + if tts_result.extension != "wav": + _LOGGER.error( + "Only WAV audio can be streamed, got %s", tts_result.extension + ) return + data = b"".join([chunk async for chunk in tts_result.async_stream_result()]) + with io.BytesIO(data) as wav_io, wave.open(wav_io, "rb") as wav_file: if ( (wav_file.getframerate() != sample_rate) diff --git a/homeassistant/components/esphome/entry_data.py b/homeassistant/components/esphome/entry_data.py index fc41ee99a00..023c6f70da4 100644 --- a/homeassistant/components/esphome/entry_data.py +++ b/homeassistant/components/esphome/entry_data.py @@ -282,15 +282,18 @@ class RuntimeEntryData: ) -> None: """Distribute an update of static infos to all platforms.""" # First, load all platforms - needed_platforms = set() - if async_get_dashboard(hass): - needed_platforms.add(Platform.UPDATE) + needed_platforms: set[Platform] = set() - if self.device_info and self.device_info.voice_assistant_feature_flags_compat( - self.api_version - ): - needed_platforms.add(Platform.BINARY_SENSOR) - needed_platforms.add(Platform.SELECT) + if self.device_info: + if async_get_dashboard(hass): + # Only load the update platform if the device_info is set + # When we restore the entry, the device_info may not be set yet + # and we don't want to load the update platform since it needs + # a complete 
device_info. + needed_platforms.add(Platform.UPDATE) + if self.device_info.voice_assistant_feature_flags_compat(self.api_version): + needed_platforms.add(Platform.BINARY_SENSOR) + needed_platforms.add(Platform.SELECT) ent_reg = er.async_get(hass) registry_get_entity = ent_reg.async_get_entity_id @@ -312,18 +315,19 @@ class RuntimeEntryData: # Make a dict of the EntityInfo by type and send # them to the listeners for each specific EntityInfo type - infos_by_type: dict[type[EntityInfo], list[EntityInfo]] = {} + infos_by_type: defaultdict[type[EntityInfo], list[EntityInfo]] = defaultdict( + list + ) for info in infos: - info_type = type(info) - if info_type not in infos_by_type: - infos_by_type[info_type] = [] - infos_by_type[info_type].append(info) + infos_by_type[type(info)].append(info) - callbacks_by_type = self.entity_info_callbacks - for type_, entity_infos in infos_by_type.items(): - if callbacks_ := callbacks_by_type.get(type_): - for callback_ in callbacks_: - callback_(entity_infos) + for type_, callbacks in self.entity_info_callbacks.items(): + # If all entities for a type are removed, we + # still need to call the callbacks with an empty list + # to make sure the entities are removed. + entity_infos = infos_by_type.get(type_, []) + for callback_ in callbacks: + callback_(entity_infos) # Finally update static info subscriptions for callback_ in self.static_info_update_subscriptions: diff --git a/homeassistant/components/esphome/event.py b/homeassistant/components/esphome/event.py index 11a5d0cfb33..f4db3844e3d 100644 --- a/homeassistant/components/esphome/event.py +++ b/homeassistant/components/esphome/event.py @@ -33,6 +33,16 @@ class EsphomeEvent(EsphomeEntity[EventInfo, Event], EventEntity): self._trigger_event(self._state.event_type) self.async_write_ha_state() + @callback + def _on_device_update(self) -> None: + """Call when device updates or entry data changes.""" + super()._on_device_update() + if self._entry_data.available: + # Event entities should go available directly + # when the device comes online and not wait + # for the next data push. 
+ self.async_write_ha_state() + async_setup_entry = partial( platform_async_setup_entry, diff --git a/homeassistant/components/everlights/light.py b/homeassistant/components/everlights/light.py index ae159d77240..c153f01e83c 100644 --- a/homeassistant/components/everlights/light.py +++ b/homeassistant/components/everlights/light.py @@ -4,7 +4,7 @@ from __future__ import annotations from datetime import timedelta import logging -from typing import Any +from typing import Any, cast import pyeverlights import voluptuous as vol @@ -84,7 +84,7 @@ class EverLightsLight(LightEntity): api: pyeverlights.EverLights, channel: int, status: dict[str, Any], - effects, + effects: list[str], ) -> None: """Initialize the light.""" self._api = api @@ -106,8 +106,10 @@ class EverLightsLight(LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" - hs_color = kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) - brightness = kwargs.get(ATTR_BRIGHTNESS, self._attr_brightness) + hs_color = cast( + tuple[float, float], kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) + ) + brightness = cast(int, kwargs.get(ATTR_BRIGHTNESS, self._attr_brightness)) effect = kwargs.get(ATTR_EFFECT) if effect is not None: @@ -116,7 +118,7 @@ class EverLightsLight(LightEntity): rgb = color_int_to_rgb(colors[0]) hsv = color_util.color_RGB_to_hsv(*rgb) hs_color = hsv[:2] - brightness = hsv[2] / 100 * 255 + brightness = round(hsv[2] / 100 * 255) else: rgb = color_util.color_hsv_to_RGB( diff --git a/homeassistant/components/fints/sensor.py b/homeassistant/components/fints/sensor.py index 318325dbb09..f5188d5bf21 100644 --- a/homeassistant/components/fints/sensor.py +++ b/homeassistant/components/fints/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections import namedtuple from datetime import timedelta import logging -from typing import Any +from typing import Any, cast from fints.client import FinTS3PinTanClient from fints.models import SEPAAccount @@ -73,7 +73,7 @@ def setup_platform( credentials = BankCredentials( config[CONF_BIN], config[CONF_USERNAME], config[CONF_PIN], config[CONF_URL] ) - fints_name = config.get(CONF_NAME, config[CONF_BIN]) + fints_name = cast(str, config.get(CONF_NAME, config[CONF_BIN])) account_config = { acc[CONF_ACCOUNT]: acc[CONF_NAME] for acc in config[CONF_ACCOUNTS] diff --git a/homeassistant/components/fritz/sensor.py b/homeassistant/components/fritz/sensor.py index bcee590460f..88de9ebdefc 100644 --- a/homeassistant/components/fritz/sensor.py +++ b/homeassistant/components/fritz/sensor.py @@ -193,7 +193,6 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = ( translation_key="max_kb_s_sent", native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND, device_class=SensorDeviceClass.DATA_RATE, - entity_category=EntityCategory.DIAGNOSTIC, value_fn=_retrieve_max_kb_s_sent_state, ), FritzSensorEntityDescription( @@ -201,7 +200,6 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = ( translation_key="max_kb_s_received", native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND, device_class=SensorDeviceClass.DATA_RATE, - entity_category=EntityCategory.DIAGNOSTIC, value_fn=_retrieve_max_kb_s_received_state, ), FritzSensorEntityDescription( @@ -225,6 +223,7 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] 
= ( translation_key="link_kb_s_sent", native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND, device_class=SensorDeviceClass.DATA_RATE, + entity_category=EntityCategory.DIAGNOSTIC, value_fn=_retrieve_link_kb_s_sent_state, ), FritzSensorEntityDescription( @@ -232,6 +231,7 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = ( translation_key="link_kb_s_received", native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND, device_class=SensorDeviceClass.DATA_RATE, + entity_category=EntityCategory.DIAGNOSTIC, value_fn=_retrieve_link_kb_s_received_state, ), FritzSensorEntityDescription( diff --git a/homeassistant/components/fritzbox/climate.py b/homeassistant/components/fritzbox/climate.py index 118e03c391f..57c7e2a696f 100644 --- a/homeassistant/components/fritzbox/climate.py +++ b/homeassistant/components/fritzbox/climate.py @@ -6,6 +6,7 @@ from typing import Any from homeassistant.components.climate import ( ATTR_HVAC_MODE, + PRESET_BOOST, PRESET_COMFORT, PRESET_ECO, ClimateEntity, @@ -38,7 +39,7 @@ from .sensor import value_scheduled_preset HVAC_MODES = [HVACMode.HEAT, HVACMode.OFF] PRESET_HOLIDAY = "holiday" PRESET_SUMMER = "summer" -PRESET_MODES = [PRESET_ECO, PRESET_COMFORT] +PRESET_MODES = [PRESET_ECO, PRESET_COMFORT, PRESET_BOOST] SUPPORTED_FEATURES = ( ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE @@ -194,6 +195,8 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity): return PRESET_HOLIDAY if self.data.summer_active: return PRESET_SUMMER + if self.data.target_temperature == ON_API_TEMPERATURE: + return PRESET_BOOST if self.data.target_temperature == self.data.comfort_temperature: return PRESET_COMFORT if self.data.target_temperature == self.data.eco_temperature: @@ -211,6 +214,8 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity): await self.async_set_temperature(temperature=self.data.comfort_temperature) elif preset_mode == PRESET_ECO: await self.async_set_temperature(temperature=self.data.eco_temperature) + elif preset_mode == PRESET_BOOST: + await self.async_set_temperature(temperature=ON_REPORT_SET_TEMPERATURE) @property def extra_state_attributes(self) -> ClimateExtraAttributes: diff --git a/homeassistant/components/fronius/config_flow.py b/homeassistant/components/fronius/config_flow.py index f35c9ce5bc1..b8aa2da81c6 100644 --- a/homeassistant/components/fronius/config_flow.py +++ b/homeassistant/components/fronius/config_flow.py @@ -149,7 +149,7 @@ class FroniusConfigFlow(ConfigFlow, domain=DOMAIN): unique_id, info = await validate_host(self.hass, user_input[CONF_HOST]) except CannotConnect: errors["base"] = "cannot_connect" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/fronius/strings.json b/homeassistant/components/fronius/strings.json index b77f6fec83c..36778f2ca5f 100644 --- a/homeassistant/components/fronius/strings.json +++ b/homeassistant/components/fronius/strings.json @@ -182,10 +182,10 @@ "state": { "startup": "Startup", "running": "Running", - "standby": "Standby", + "standby": "[%key:common::state::standby%]", "bootloading": "Bootloading", "error": "Error", - "idle": "Idle", + "idle": "[%key:common::state::idle%]", "ready": "Ready", "sleeping": "Sleeping" } diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index b210fdb6661..884436ad4db 100644 --- a/homeassistant/components/frontend/manifest.json +++ 
b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20250306.0"] + "requirements": ["home-assistant-frontend==20250328.0"] } diff --git a/homeassistant/components/frontier_silicon/config_flow.py b/homeassistant/components/frontier_silicon/config_flow.py index f6514da28ff..dc4f6bea989 100644 --- a/homeassistant/components/frontier_silicon/config_flow.py +++ b/homeassistant/components/frontier_silicon/config_flow.py @@ -108,8 +108,8 @@ class FrontierSiliconConfigFlow(ConfigFlow, domain=DOMAIN): self._webfsapi_url = await AFSAPI.get_webfsapi_endpoint(device_url) except FSConnectionError: return self.async_abort(reason="cannot_connect") - except Exception as exception: # noqa: BLE001 - _LOGGER.debug(exception) + except Exception: + _LOGGER.exception("Unexpected exception") return self.async_abort(reason="unknown") # try to login with default pin diff --git a/homeassistant/components/fujitsu_fglair/config_flow.py b/homeassistant/components/fujitsu_fglair/config_flow.py index c4b097ff0de..9369fd7b7cd 100644 --- a/homeassistant/components/fujitsu_fglair/config_flow.py +++ b/homeassistant/components/fujitsu_fglair/config_flow.py @@ -62,7 +62,7 @@ class FGLairConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except AylaAuthError: errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" diff --git a/homeassistant/components/fyta/config_flow.py b/homeassistant/components/fyta/config_flow.py index 78cb7647785..9c5ab1de405 100644 --- a/homeassistant/components/fyta/config_flow.py +++ b/homeassistant/components/fyta/config_flow.py @@ -65,8 +65,8 @@ class FytaConfigFlow(ConfigFlow, domain=DOMAIN): return {"base": "invalid_auth"} except FytaPasswordError: return {"base": "invalid_auth", CONF_PASSWORD: "password_error"} - except Exception as e: # noqa: BLE001 - _LOGGER.error(e) + except Exception: + _LOGGER.exception("Unexpected exception") return {"base": "unknown"} finally: await fyta.client.close() diff --git a/homeassistant/components/gaggenau/__init__.py b/homeassistant/components/gaggenau/__init__.py new file mode 100644 index 00000000000..2c03410c35d --- /dev/null +++ b/homeassistant/components/gaggenau/__init__.py @@ -0,0 +1 @@ +"""Gaggenau virtual integration.""" diff --git a/homeassistant/components/gaggenau/manifest.json b/homeassistant/components/gaggenau/manifest.json new file mode 100644 index 00000000000..9dc38b2e4b3 --- /dev/null +++ b/homeassistant/components/gaggenau/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "gaggenau", + "name": "Gaggenau", + "integration_type": "virtual", + "supported_by": "home_connect" +} diff --git a/homeassistant/components/generic_thermostat/climate.py b/homeassistant/components/generic_thermostat/climate.py index 190caa58b3f..185040f02c9 100644 --- a/homeassistant/components/generic_thermostat/climate.py +++ b/homeassistant/components/generic_thermostat/climate.py @@ -539,10 +539,14 @@ class GenericThermostat(ClimateEntity, RestoreEntity): return assert self._cur_temp is not None and self._target_temp is not None - too_cold = self._target_temp >= self._cur_temp + self._cold_tolerance - too_hot = self._cur_temp >= self._target_temp + self._hot_tolerance + + min_temp = self._target_temp - self._cold_tolerance + max_temp = self._target_temp 
+ self._hot_tolerance + if self._is_device_active: - if (self.ac_mode and too_cold) or (not self.ac_mode and too_hot): + if (self.ac_mode and self._cur_temp <= min_temp) or ( + not self.ac_mode and self._cur_temp >= max_temp + ): _LOGGER.debug("Turning off heater %s", self.heater_entity_id) await self._async_heater_turn_off() elif time is not None: @@ -552,7 +556,9 @@ class GenericThermostat(ClimateEntity, RestoreEntity): self.heater_entity_id, ) await self._async_heater_turn_on() - elif (self.ac_mode and too_hot) or (not self.ac_mode and too_cold): + elif (self.ac_mode and self._cur_temp > max_temp) or ( + not self.ac_mode and self._cur_temp < min_temp + ): _LOGGER.debug("Turning on heater %s", self.heater_entity_id) await self._async_heater_turn_on() elif time is not None: diff --git a/homeassistant/components/generic_thermostat/strings.json b/homeassistant/components/generic_thermostat/strings.json index 58280e99543..9b88d590eea 100644 --- a/homeassistant/components/generic_thermostat/strings.json +++ b/homeassistant/components/generic_thermostat/strings.json @@ -21,7 +21,7 @@ "heater": "Switch entity used to cool or heat depending on A/C mode.", "target_sensor": "Temperature sensor that reflects the current temperature.", "min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on.", - "cold_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched on. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will start when the sensor equals or goes below 24.5.", + "cold_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor and the target temperature that must change prior to being switched on. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will start when the sensor goes below 24.5.", "hot_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched off. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will stop when the sensor equals or goes above 25.5."
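A minimal sketch (hypothetical helper name, not the integration's actual code) of the hysteresis that the new min_temp/max_temp comparisons implement for a plain heater (ac_mode is False): the heater starts only when the sensor drops strictly below target minus cold_tolerance and stops once it reaches target plus hot_tolerance, matching the updated cold_tolerance description.

def heater_action(cur_temp: float, target: float, cold_tol: float, hot_tol: float, heater_on: bool) -> str:
    """Mirror the new comparisons for a heater (not A/C); illustrative only."""
    min_temp = target - cold_tol
    max_temp = target + hot_tol
    if heater_on:
        # Active heater: switch off once the sensor reaches target + hot_tolerance.
        return "turn_off" if cur_temp >= max_temp else "keep_on"
    # Idle heater: switch on only when the sensor is strictly below target - cold_tolerance.
    return "turn_on" if cur_temp < min_temp else "keep_off"

# Target 25.0 with 0.5 tolerances: 24.5 no longer starts the heater, 24.4 does,
# and 25.5 switches it off again.
assert heater_action(24.5, 25.0, 0.5, 0.5, heater_on=False) == "keep_off"
assert heater_action(24.4, 25.0, 0.5, 0.5, heater_on=False) == "turn_on"
assert heater_action(25.5, 25.0, 0.5, 0.5, heater_on=True) == "turn_off"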
} }, diff --git a/homeassistant/components/gogogate2/config_flow.py b/homeassistant/components/gogogate2/config_flow.py index 0348d0b428c..cebff656d5d 100644 --- a/homeassistant/components/gogogate2/config_flow.py +++ b/homeassistant/components/gogogate2/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import dataclasses +import logging import re from typing import Any, Self @@ -27,6 +28,8 @@ from homeassistant.helpers.service_info.zeroconf import ( from .common import get_api from .const import DEVICE_TYPE_GOGOGATE2, DEVICE_TYPE_ISMARTGATE, DOMAIN +_LOGGER = logging.getLogger(__name__) + DEVICE_NAMES = { DEVICE_TYPE_GOGOGATE2: "Gogogate2", DEVICE_TYPE_ISMARTGATE: "ismartgate", @@ -115,7 +118,8 @@ class Gogogate2FlowHandler(ConfigFlow, domain=DOMAIN): else: errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" if self._ip_address and self._device_type: diff --git a/homeassistant/components/google/calendar.py b/homeassistant/components/google/calendar.py index 4ae8c8cce03..a62d2bf1d6b 100644 --- a/homeassistant/components/google/calendar.py +++ b/homeassistant/components/google/calendar.py @@ -89,6 +89,7 @@ OPAQUE = "opaque" RRULE_PREFIX = "RRULE:" SERVICE_CREATE_EVENT = "create_event" +FILTERED_EVENT_TYPES = [EventTypeEnum.BIRTHDAY, EventTypeEnum.WORKING_LOCATION] @dataclasses.dataclass(frozen=True, kw_only=True) @@ -103,7 +104,7 @@ class GoogleCalendarEntityDescription(CalendarEntityDescription): search: str | None local_sync: bool device_id: str - working_location: bool = False + event_type: EventTypeEnum | None = None def _get_entity_descriptions( @@ -173,14 +174,24 @@ def _get_entity_descriptions( local_sync, ) if calendar_item.primary and local_sync: - _LOGGER.debug("work location entity") + # Create a separate calendar for birthdays + entity_descriptions.append( + dataclasses.replace( + entity_description, + key=f"{key}-birthdays", + translation_key="birthdays", + event_type=EventTypeEnum.BIRTHDAY, + name=None, + entity_id=None, + ) + ) # Create an optional disabled by default entity for Work Location entity_descriptions.append( dataclasses.replace( entity_description, key=f"{key}-work-location", translation_key="working_location", - working_location=True, + event_type=EventTypeEnum.WORKING_LOCATION, name=None, entity_id=None, entity_registry_enabled_default=False, @@ -383,8 +394,17 @@ class GoogleCalendarEntity( for attendee in event.attendees ): return False - is_working_location_event = event.event_type == EventTypeEnum.WORKING_LOCATION - if self.entity_description.working_location != is_working_location_event: + # Calendar entity may be limited to a specific event type + if ( + self.entity_description.event_type is not None + and self.entity_description.event_type != event.event_type + ): + return False + # Default calendar entity omits the special types but includes all the others + if ( + self.entity_description.event_type is None + and event.event_type in FILTERED_EVENT_TYPES + ): return False if self._ignore_availability: return True diff --git a/homeassistant/components/google/manifest.json b/homeassistant/components/google/manifest.json index 81fd2b07de4..efce97a0d6f 100644 --- a/homeassistant/components/google/manifest.json +++ b/homeassistant/components/google/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/google", "iot_class": "cloud_polling", "loggers": ["googleapiclient"], - "requirements":
["gcal-sync==7.0.0", "oauth2client==4.1.3", "ical==9.0.1"] + "requirements": ["gcal-sync==7.0.0", "oauth2client==4.1.3", "ical==9.0.3"] } diff --git a/homeassistant/components/google/strings.json b/homeassistant/components/google/strings.json index 5ee0cdd9c14..5776fd0480b 100644 --- a/homeassistant/components/google/strings.json +++ b/homeassistant/components/google/strings.json @@ -131,6 +131,9 @@ "calendar": { "working_location": { "name": "Working location" + }, + "birthdays": { + "name": "Birthdays" } } } diff --git a/homeassistant/components/google_cloud/manifest.json b/homeassistant/components/google_cloud/manifest.json index 3e08b6254db..3e6371cbe23 100644 --- a/homeassistant/components/google_cloud/manifest.json +++ b/homeassistant/components/google_cloud/manifest.json @@ -8,7 +8,7 @@ "integration_type": "service", "iot_class": "cloud_push", "requirements": [ - "google-cloud-texttospeech==2.17.2", - "google-cloud-speech==2.27.0" + "google-cloud-texttospeech==2.25.1", + "google-cloud-speech==2.31.1" ] } diff --git a/homeassistant/components/google_cloud/stt.py b/homeassistant/components/google_cloud/stt.py index 41c5a6710b7..cd5055383ea 100644 --- a/homeassistant/components/google_cloud/stt.py +++ b/homeassistant/components/google_cloud/stt.py @@ -6,6 +6,7 @@ from collections.abc import AsyncGenerator, AsyncIterable import logging from google.api_core.exceptions import GoogleAPIError, Unauthenticated +from google.api_core.retry import AsyncRetry from google.cloud import speech_v1 from homeassistant.components.stt import ( @@ -127,6 +128,7 @@ class GoogleCloudSpeechToTextEntity(SpeechToTextEntity): responses = await self._client.streaming_recognize( requests=request_generator(), timeout=10, + retry=AsyncRetry(initial=0.1, maximum=2.0, multiplier=2.0), ) transcript = "" diff --git a/homeassistant/components/google_cloud/tts.py b/homeassistant/components/google_cloud/tts.py index 1f5f838b593..16519645dee 100644 --- a/homeassistant/components/google_cloud/tts.py +++ b/homeassistant/components/google_cloud/tts.py @@ -7,6 +7,7 @@ from pathlib import Path from typing import Any, cast from google.api_core.exceptions import GoogleAPIError, Unauthenticated +from google.api_core.retry import AsyncRetry from google.cloud import texttospeech import voluptuous as vol @@ -215,7 +216,11 @@ class BaseGoogleCloudProvider: ), ) - response = await self._client.synthesize_speech(request, timeout=10) + response = await self._client.synthesize_speech( + request, + timeout=10, + retry=AsyncRetry(initial=0.1, maximum=2.0, multiplier=2.0), + ) if encoding == texttospeech.AudioEncoding.MP3: extension = "mp3" diff --git a/homeassistant/components/google_generative_ai_conversation/__init__.py b/homeassistant/components/google_generative_ai_conversation/__init__.py index c32d7b5ddea..88a51446cda 100644 --- a/homeassistant/components/google_generative_ai_conversation/__init__.py +++ b/homeassistant/components/google_generative_ai_conversation/__init__.py @@ -5,7 +5,7 @@ from __future__ import annotations import mimetypes from pathlib import Path -from google import genai # type: ignore[attr-defined] +from google.genai import Client from google.genai.errors import APIError, ClientError from requests.exceptions import Timeout import voluptuous as vol @@ -43,7 +43,7 @@ CONF_FILENAMES = "filenames" CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) PLATFORMS = (Platform.CONVERSATION,) -type GoogleGenerativeAIConfigEntry = ConfigEntry[genai.Client] +type GoogleGenerativeAIConfigEntry = ConfigEntry[Client] 
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: @@ -139,7 +139,11 @@ async def async_setup_entry( """Set up Google Generative AI Conversation from a config entry.""" try: - client = genai.Client(api_key=entry.data[CONF_API_KEY]) + + def _init_client() -> Client: + return Client(api_key=entry.data[CONF_API_KEY]) + + client = await hass.async_add_executor_job(_init_client) await client.aio.models.get( model=entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL), config={"http_options": {"timeout": TIMEOUT_MILLIS}}, diff --git a/homeassistant/components/google_generative_ai_conversation/config_flow.py b/homeassistant/components/google_generative_ai_conversation/config_flow.py index b413f9c9a62..b7753c21bf9 100644 --- a/homeassistant/components/google_generative_ai_conversation/config_flow.py +++ b/homeassistant/components/google_generative_ai_conversation/config_flow.py @@ -7,7 +7,7 @@ import logging from types import MappingProxyType from typing import Any -from google import genai # type: ignore[attr-defined] +from google import genai from google.genai.errors import APIError, ClientError from requests.exceptions import Timeout import voluptuous as vol diff --git a/homeassistant/components/google_generative_ai_conversation/conversation.py b/homeassistant/components/google_generative_ai_conversation/conversation.py index cca5f2410bd..5460f48f20e 100644 --- a/homeassistant/components/google_generative_ai_conversation/conversation.py +++ b/homeassistant/components/google_generative_ai_conversation/conversation.py @@ -171,17 +171,25 @@ def _escape_decode(value: Any) -> Any: return value +def _create_google_tool_response_parts( + parts: list[conversation.ToolResultContent], +) -> list[Part]: + """Create Google tool response parts.""" + return [ + Part.from_function_response( + name=tool_result.tool_name, response=tool_result.tool_result + ) + for tool_result in parts + ] + + def _create_google_tool_response_content( content: list[conversation.ToolResultContent], ) -> Content: """Create a Google tool response content.""" return Content( - parts=[ - Part.from_function_response( - name=tool_result.tool_name, response=tool_result.tool_result - ) - for tool_result in content - ] + role="user", + parts=_create_google_tool_response_parts(content), ) @@ -402,7 +410,7 @@ class GoogleGenerativeAIConversationEntity( chat = self._genai_client.aio.chats.create( model=model_name, history=messages, config=generateContentConfig ) - chat_request: str | Content = user_input.text + chat_request: str | list[Part] = user_input.text # To prevent infinite loops, we limit the number of iterations for _iteration in range(MAX_TOOL_ITERATIONS): try: @@ -456,7 +464,7 @@ class GoogleGenerativeAIConversationEntity( ) ) - chat_request = _create_google_tool_response_content( + chat_request = _create_google_tool_response_parts( [ tool_response async for tool_response in chat_log.async_add_assistant_content( diff --git a/homeassistant/components/google_generative_ai_conversation/manifest.json b/homeassistant/components/google_generative_ai_conversation/manifest.json index ed215970d7f..25e44964a6d 100644 --- a/homeassistant/components/google_generative_ai_conversation/manifest.json +++ b/homeassistant/components/google_generative_ai_conversation/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/google_generative_ai_conversation", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["google-genai==1.1.0"] + "requirements": 
["google-genai==1.7.0"] } diff --git a/homeassistant/components/gree/strings.json b/homeassistant/components/gree/strings.json index 45911433b92..403cf7d45fc 100644 --- a/homeassistant/components/gree/strings.json +++ b/homeassistant/components/gree/strings.json @@ -16,13 +16,13 @@ "name": "Panel light" }, "quiet": { - "name": "Quiet" + "name": "Quiet mode" }, "fresh_air": { "name": "Fresh air" }, "xfan": { - "name": "XFan" + "name": "Xtra fan" }, "health_mode": { "name": "Health mode" diff --git a/homeassistant/components/habitica/const.py b/homeassistant/components/habitica/const.py index 8b745ff2b99..7a5677cb687 100644 --- a/homeassistant/components/habitica/const.py +++ b/homeassistant/components/habitica/const.py @@ -79,6 +79,7 @@ SERVICE_CREATE_HABIT = "create_habit" SERVICE_UPDATE_TODO = "update_todo" SERVICE_CREATE_TODO = "create_todo" SERVICE_UPDATE_DAILY = "update_daily" +SERVICE_CREATE_DAILY = "create_daily" DEVELOPER_ID = "4c4ca53f-c059-4ffa-966e-9d29dd405daf" X_CLIENT = f"{DEVELOPER_ID} - {APPLICATION_NAME} {__version__}" diff --git a/homeassistant/components/habitica/icons.json b/homeassistant/components/habitica/icons.json index fcb9ec56fa7..aac90814af5 100644 --- a/homeassistant/components/habitica/icons.json +++ b/homeassistant/components/habitica/icons.json @@ -270,6 +270,14 @@ "repeat_weekly_options": "mdi:calendar-refresh", "repeat_monthly_options": "mdi:calendar-refresh" } + }, + "create_daily": { + "service": "mdi:calendar-month", + "sections": { + "developer_options": "mdi:test-tube", + "repeat_weekly_options": "mdi:calendar-refresh", + "repeat_monthly_options": "mdi:calendar-refresh" + } } } } diff --git a/homeassistant/components/habitica/services.py b/homeassistant/components/habitica/services.py index 9fb0b0b7537..bcbd6caa7a7 100644 --- a/homeassistant/components/habitica/services.py +++ b/homeassistant/components/habitica/services.py @@ -84,6 +84,7 @@ from .const import ( SERVICE_API_CALL, SERVICE_CANCEL_QUEST, SERVICE_CAST_SKILL, + SERVICE_CREATE_DAILY, SERVICE_CREATE_HABIT, SERVICE_CREATE_REWARD, SERVICE_CREATE_TODO, @@ -243,6 +244,7 @@ SERVICE_TASK_TYPE_MAP = { SERVICE_UPDATE_TODO: TaskType.TODO, SERVICE_CREATE_TODO: TaskType.TODO, SERVICE_UPDATE_DAILY: TaskType.DAILY, + SERVICE_CREATE_DAILY: TaskType.DAILY, } @@ -913,7 +915,12 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 schema=SERVICE_UPDATE_TASK_SCHEMA, supports_response=SupportsResponse.ONLY, ) - for service in (SERVICE_CREATE_HABIT, SERVICE_CREATE_REWARD, SERVICE_CREATE_TODO): + for service in ( + SERVICE_CREATE_DAILY, + SERVICE_CREATE_HABIT, + SERVICE_CREATE_REWARD, + SERVICE_CREATE_TODO, + ): hass.services.async_register( DOMAIN, service, diff --git a/homeassistant/components/habitica/services.yaml b/homeassistant/components/habitica/services.yaml index 46b3211790e..3fb25e2b4b7 100644 --- a/homeassistant/components/habitica/services.yaml +++ b/homeassistant/components/habitica/services.yaml @@ -347,11 +347,11 @@ update_daily: notes: *notes checklist_options: *checklist_options priority: *priority - start_date: + start_date: &start_date required: false selector: date: - frequency: + frequency: &frequency_daily required: false selector: select: @@ -362,7 +362,7 @@ update_daily: - "yearly" translation_key: "frequency" mode: dropdown - every_x: + every_x: &every_x required: false selector: number: @@ -370,7 +370,7 @@ update_daily: step: 1 unit_of_measurement: "🔃" mode: box - repeat_weekly_options: + repeat_weekly_options: &repeat_weekly_options collapsed: true fields: repeat: @@ 
-388,7 +388,7 @@ update_daily: mode: list translation_key: repeat multiple: true - repeat_monthly_options: + repeat_monthly_options: &repeat_monthly_options collapsed: true fields: repeat_monthly: @@ -403,7 +403,7 @@ update_daily: reminder_options: collapsed: true fields: - reminder: + reminder: &reminder_daily required: false selector: text: @@ -420,7 +420,7 @@ update_daily: developer_options: collapsed: true fields: - streak: + streak: &streak required: false selector: number: @@ -429,3 +429,18 @@ update_daily: unit_of_measurement: "▶▶" mode: box alias: *alias +create_daily: + fields: + config_entry: *config_entry + name: *name + notes: *notes + add_checklist_item: *add_checklist_item + priority: *priority + start_date: *start_date + frequency: *frequency_daily + every_x: *every_x + repeat_weekly_options: *repeat_weekly_options + repeat_monthly_options: *repeat_monthly_options + reminder: *reminder_daily + tag: *tag + developer_options: *developer_options diff --git a/homeassistant/components/habitica/strings.json b/homeassistant/components/habitica/strings.json index fac0fdf3868..695eb1576fe 100644 --- a/homeassistant/components/habitica/strings.json +++ b/homeassistant/components/habitica/strings.json @@ -52,7 +52,19 @@ "reminder_options_description": "Add, remove or clear reminders of a Habitica task.", "date_name": "Due date", "date_description": "The to-do's due date.", - "repeat_name": "Repeat on" + "repeat_name": "Repeat on", + "start_date_name": "Start date", + "start_date_description": "Defines when the daily task becomes active and specifies the exact weekday or day of the month it repeats on.", + "frequency_daily_name": "Repeat interval", + "frequency_daily_description": "The repetition interval of a daily.", + "every_x_name": "Repeat every X", + "every_x_description": "The number of intervals (days, weeks, months, or years) after which the daily repeats, based on the chosen repetition interval. A value of 0 makes the daily inactive ('Grey Daily').", + "repeat_weekly_description": "The days of the week the daily repeats.", + "repeat_monthly_description": "Whether a monthly recurring task repeats on the same calendar day each month or on the same weekday and week of the month, based on the start date.", + "repeat_weekly_options_name": "Weekly repeat days", + "repeat_weekly_options_description": "Options related to weekly repetition, applicable when the repetition interval is set to weekly.", + "repeat_monthly_options_name": "Monthly repeat day", + "repeat_monthly_options_description": "Options related to monthly repetition, applicable when the repetition interval is set to monthly." }, "config": { "abort": { @@ -1076,24 +1088,24 @@ "description": "[%key:component::habitica::common::priority_description%]" }, "start_date": { - "name": "Start date", - "description": "Defines when the daily task becomes active and specifies the exact weekday or day of the month it repeats on." + "name": "[%key:component::habitica::common::start_date_name%]", + "description": "[%key:component::habitica::common::start_date_description%]" }, "frequency": { - "name": "Repeat interval", - "description": "The repetition interval of a daily." + "name": "[%key:component::habitica::common::frequency_daily_name%]", + "description": "[%key:component::habitica::common::frequency_daily_description%]" }, "every_x": { - "name": "Repeat every X", - "description": "The number of intervals (days, weeks, months, or years) after which the daily repeats, based on the chosen repetition interval. 
A value of 0 makes the daily inactive ('Grey Daily')." + "name": "[%key:component::habitica::common::every_x_name%]", + "description": "[%key:component::habitica::common::every_x_description%]" }, "repeat": { "name": "[%key:component::habitica::common::repeat_name%]", - "description": "The days of the week the daily repeats." + "description": "[%key:component::habitica::common::repeat_weekly_description%]" }, "repeat_monthly": { "name": "[%key:component::habitica::common::repeat_name%]", - "description": "Whether a monthly recurring task repeats on the same calendar day each month or on the same weekday and week of the month, based on the start date." + "description": "[%key:component::habitica::common::repeat_monthly_description%]" }, "add_checklist_item": { "name": "[%key:component::habitica::common::add_checklist_item_name%]", @@ -1134,12 +1146,12 @@ "description": "[%key:component::habitica::common::checklist_options_description%]" }, "repeat_weekly_options": { - "name": "Weekly repeat days", - "description": "Options related to weekly repetition, applicable when the repetition interval is set to weekly." + "name": "[%key:component::habitica::common::repeat_weekly_options_name%]", + "description": "[%key:component::habitica::common::repeat_weekly_options_description%]" }, "repeat_monthly_options": { - "name": "Monthly repeat day", - "description": "Options related to monthly repetition, applicable when the repetition interval is set to monthly." + "name": "[%key:component::habitica::common::repeat_monthly_options_name%]", + "description": "[%key:component::habitica::common::repeat_monthly_options_description%]" }, "tag_options": { "name": "[%key:component::habitica::common::tag_options_name%]", @@ -1154,6 +1166,78 @@ "description": "[%key:component::habitica::common::reminder_options_description%]" } } + }, + "create_daily": { + "name": "Create a daily", + "description": "Adds a new daily.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::common::config_entry_description%]" + }, + "name": { + "name": "[%key:component::habitica::common::task_name%]", + "description": "[%key:component::habitica::common::name_description%]" + }, + "notes": { + "name": "[%key:component::habitica::common::notes_name%]", + "description": "[%key:component::habitica::common::notes_description%]" + }, + "tag": { + "name": "[%key:component::habitica::common::tag_options_name%]", + "description": "[%key:component::habitica::common::tag_description%]" + }, + "alias": { + "name": "[%key:component::habitica::common::alias_name%]", + "description": "[%key:component::habitica::common::alias_description%]" + }, + "priority": { + "name": "[%key:component::habitica::common::priority_name%]", + "description": "[%key:component::habitica::common::priority_description%]" + }, + "start_date": { + "name": "[%key:component::habitica::common::start_date_name%]", + "description": "[%key:component::habitica::common::start_date_description%]" + }, + "frequency": { + "name": "[%key:component::habitica::common::frequency_daily_name%]", + "description": "[%key:component::habitica::common::frequency_daily_description%]" + }, + "every_x": { + "name": "[%key:component::habitica::common::every_x_name%]", + "description": "[%key:component::habitica::common::every_x_description%]" + }, + "repeat": { + "name": "[%key:component::habitica::common::repeat_name%]", + "description": "[%key:component::habitica::common::repeat_weekly_description%]" + 
}, + "repeat_monthly": { + "name": "[%key:component::habitica::common::repeat_name%]", + "description": "[%key:component::habitica::common::repeat_monthly_description%]" + }, + "add_checklist_item": { + "name": "[%key:component::habitica::common::checklist_options_name%]", + "description": "[%key:component::habitica::common::add_checklist_item_description%]" + }, + "reminder": { + "name": "[%key:component::habitica::common::reminder_options_name%]", + "description": "[%key:component::habitica::common::reminder_description%]" + } + }, + "sections": { + "repeat_weekly_options": { + "name": "[%key:component::habitica::common::repeat_weekly_options_name%]", + "description": "[%key:component::habitica::common::repeat_weekly_options_description%]" + }, + "repeat_monthly_options": { + "name": "[%key:component::habitica::common::repeat_monthly_options_name%]", + "description": "[%key:component::habitica::common::repeat_monthly_options_description%]" + }, + "developer_options": { + "name": "[%key:component::habitica::common::developer_options_name%]", + "description": "[%key:component::habitica::common::developer_options_description%]" + } + } } }, "selector": { diff --git a/homeassistant/components/heos/const.py b/homeassistant/components/heos/const.py index 6d603f7ad30..b83da128c91 100644 --- a/homeassistant/components/heos/const.py +++ b/homeassistant/components/heos/const.py @@ -2,10 +2,13 @@ ATTR_PASSWORD = "password" ATTR_USERNAME = "username" +ATTR_QUEUE_IDS = "queue_ids" DOMAIN = "heos" ENTRY_TITLE = "HEOS System" +SERVICE_GET_QUEUE = "get_queue" SERVICE_GROUP_VOLUME_SET = "group_volume_set" SERVICE_GROUP_VOLUME_DOWN = "group_volume_down" SERVICE_GROUP_VOLUME_UP = "group_volume_up" +SERVICE_REMOVE_FROM_QUEUE = "remove_from_queue" SERVICE_SIGN_IN = "sign_in" SERVICE_SIGN_OUT = "sign_out" diff --git a/homeassistant/components/heos/icons.json b/homeassistant/components/heos/icons.json index d7a998b6aec..c11b499fc0b 100644 --- a/homeassistant/components/heos/icons.json +++ b/homeassistant/components/heos/icons.json @@ -1,5 +1,11 @@ { "services": { + "get_queue": { + "service": "mdi:playlist-music" + }, + "remove_from_queue": { + "service": "mdi:playlist-remove" + }, "group_volume_set": { "service": "mdi:volume-medium" }, diff --git a/homeassistant/components/heos/media_player.py b/homeassistant/components/heos/media_player.py index 311190ccb74..65314439c18 100644 --- a/homeassistant/components/heos/media_player.py +++ b/homeassistant/components/heos/media_player.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Awaitable, Callable, Coroutine, Sequence from contextlib import suppress +import dataclasses from datetime import datetime from functools import reduce, wraps import logging @@ -23,12 +24,10 @@ from pyheos import ( const as heos_const, ) from pyheos.util import mediauri as heos_source -import voluptuous as vol from homeassistant.components import media_source from homeassistant.components.media_player import ( ATTR_MEDIA_ENQUEUE, - ATTR_MEDIA_VOLUME_LEVEL, BrowseError, BrowseMedia, MediaClass, @@ -42,24 +41,16 @@ from homeassistant.components.media_player import ( ) from homeassistant.components.media_source import BrowseMediaSource from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant, ServiceResponse, callback from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import ( - config_validation as cv, - 
entity_platform, - entity_registry as er, -) +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util.dt import utcnow -from .const import ( - DOMAIN as HEOS_DOMAIN, - SERVICE_GROUP_VOLUME_DOWN, - SERVICE_GROUP_VOLUME_SET, - SERVICE_GROUP_VOLUME_UP, -) +from . import services +from .const import DOMAIN as HEOS_DOMAIN from .coordinator import HeosConfigEntry, HeosCoordinator PARALLEL_UPDATES = 0 @@ -130,19 +121,7 @@ async def async_setup_entry( async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Add media players for a config entry.""" - # Register custom entity services - platform = entity_platform.async_get_current_platform() - platform.async_register_entity_service( - SERVICE_GROUP_VOLUME_SET, - {vol.Required(ATTR_MEDIA_VOLUME_LEVEL): cv.small_float}, - "async_set_group_volume_level", - ) - platform.async_register_entity_service( - SERVICE_GROUP_VOLUME_DOWN, None, "async_group_volume_down" - ) - platform.async_register_entity_service( - SERVICE_GROUP_VOLUME_UP, None, "async_group_volume_up" - ) + services.register_media_player_services() def add_entities_callback(players: Sequence[HeosPlayer]) -> None: """Add entities for each player.""" @@ -155,20 +134,20 @@ async def async_setup_entry( add_entities_callback(list(coordinator.heos.players.values())) -type _FuncType[**_P] = Callable[_P, Awaitable[Any]] -type _ReturnFuncType[**_P] = Callable[_P, Coroutine[Any, Any, None]] +type _FuncType[**_P, _R] = Callable[_P, Awaitable[_R]] +type _ReturnFuncType[**_P, _R] = Callable[_P, Coroutine[Any, Any, _R]] -def catch_action_error[**_P]( +def catch_action_error[**_P, _R]( action: str, -) -> Callable[[_FuncType[_P]], _ReturnFuncType[_P]]: +) -> Callable[[_FuncType[_P, _R]], _ReturnFuncType[_P, _R]]: """Return decorator that catches errors and raises HomeAssistantError.""" - def decorator(func: _FuncType[_P]) -> _ReturnFuncType[_P]: + def decorator(func: _FuncType[_P, _R]) -> _ReturnFuncType[_P, _R]: @wraps(func) - async def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> None: + async def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> _R: try: - await func(*args, **kwargs) + return await func(*args, **kwargs) except (HeosError, ValueError) as ex: raise HomeAssistantError( translation_domain=HEOS_DOMAIN, @@ -268,6 +247,12 @@ class HeosMediaPlayer(CoordinatorEntity[HeosCoordinator], MediaPlayerEntity): self.async_on_remove(self._player.add_on_player_event(self._player_update)) await super().async_added_to_hass() + @catch_action_error("get queue") + async def async_get_queue(self) -> ServiceResponse: + """Get the queue for the current player.""" + queue = await self._player.get_queue() + return {"queue": [dataclasses.asdict(item) for item in queue]} + @catch_action_error("clear playlist") async def async_clear_playlist(self) -> None: """Clear players playlist.""" @@ -368,6 +353,15 @@ class HeosMediaPlayer(CoordinatorEntity[HeosCoordinator], MediaPlayerEntity): await self._player.play_preset_station(index) return + if media_type == "queue": + # media_id must be an int + try: + queue_id = int(media_id) + except ValueError: + raise ValueError(f"Invalid queue id '{media_id}'") from None + await self._player.play_queue(queue_id) + return + raise ValueError(f"Unsupported media type '{media_type}'") @catch_action_error("select source") @@ -481,6 +475,10 @@ class 
HeosMediaPlayer(CoordinatorEntity[HeosCoordinator], MediaPlayerEntity): await self.coordinator.heos.set_group(new_members) return + async def async_remove_from_queue(self, queue_ids: list[int]) -> None: + """Remove items from the queue.""" + await self._player.remove_from_queue(queue_ids) + @property def available(self) -> bool: """Return True if the device is available.""" diff --git a/homeassistant/components/heos/services.py b/homeassistant/components/heos/services.py index dc11bb7a76d..fe8c887691c 100644 --- a/homeassistant/components/heos/services.py +++ b/homeassistant/components/heos/services.py @@ -1,19 +1,33 @@ """Services for the HEOS integration.""" +from dataclasses import dataclass import logging +from typing import Final from pyheos import CommandAuthenticationError, Heos, HeosError import voluptuous as vol +from homeassistant.components.media_player import ATTR_MEDIA_VOLUME_LEVEL from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import config_validation as cv, issue_registry as ir +from homeassistant.helpers import ( + config_validation as cv, + entity_platform, + issue_registry as ir, +) +from homeassistant.helpers.typing import VolDictType, VolSchemaType from .const import ( ATTR_PASSWORD, + ATTR_QUEUE_IDS, ATTR_USERNAME, DOMAIN, + SERVICE_GET_QUEUE, + SERVICE_GROUP_VOLUME_DOWN, + SERVICE_GROUP_VOLUME_SET, + SERVICE_GROUP_VOLUME_UP, + SERVICE_REMOVE_FROM_QUEUE, SERVICE_SIGN_IN, SERVICE_SIGN_OUT, ) @@ -44,6 +58,62 @@ def register(hass: HomeAssistant) -> None: ) +@dataclass(frozen=True) +class EntityServiceDescription: + """Describe an entity service.""" + + name: str + method_name: str + schema: VolDictType | VolSchemaType | None = None + supports_response: SupportsResponse = SupportsResponse.NONE + + def async_register(self, platform: entity_platform.EntityPlatform) -> None: + """Register the service with the platform.""" + platform.async_register_entity_service( + self.name, + self.schema, + self.method_name, + supports_response=self.supports_response, + ) + + +REMOVE_FROM_QUEUE_SCHEMA: Final[VolDictType] = { + vol.Required(ATTR_QUEUE_IDS): vol.All( + cv.ensure_list, + [vol.All(cv.positive_int, vol.Range(min=1))], + vol.Unique(), + ) +} +GROUP_VOLUME_SET_SCHEMA: Final[VolDictType] = { + vol.Required(ATTR_MEDIA_VOLUME_LEVEL): cv.small_float +} + +MEDIA_PLAYER_ENTITY_SERVICES: Final = ( + # Player queue services + EntityServiceDescription( + SERVICE_GET_QUEUE, "async_get_queue", supports_response=SupportsResponse.ONLY + ), + EntityServiceDescription( + SERVICE_REMOVE_FROM_QUEUE, "async_remove_from_queue", REMOVE_FROM_QUEUE_SCHEMA + ), + # Group volume services + EntityServiceDescription( + SERVICE_GROUP_VOLUME_SET, + "async_set_group_volume_level", + GROUP_VOLUME_SET_SCHEMA, + ), + EntityServiceDescription(SERVICE_GROUP_VOLUME_DOWN, "async_group_volume_down"), + EntityServiceDescription(SERVICE_GROUP_VOLUME_UP, "async_group_volume_up"), +) + + +def register_media_player_services() -> None: + """Register media_player entity services.""" + platform = entity_platform.async_get_current_platform() + for service in MEDIA_PLAYER_ENTITY_SERVICES: + service.async_register(platform) + + def _get_controller(hass: HomeAssistant) -> Heos: """Get the HEOS controller instance.""" _LOGGER.warning( diff --git 
a/homeassistant/components/heos/services.yaml b/homeassistant/components/heos/services.yaml index 8f3a43421f6..fd74b2f90c4 100644 --- a/homeassistant/components/heos/services.yaml +++ b/homeassistant/components/heos/services.yaml @@ -1,3 +1,22 @@ +get_queue: + target: + entity: + integration: heos + domain: media_player + +remove_from_queue: + target: + entity: + integration: heos + domain: media_player + fields: + queue_ids: + required: true + selector: + text: + multiple: true + type: number + group_volume_set: target: entity: diff --git a/homeassistant/components/heos/strings.json b/homeassistant/components/heos/strings.json index 593c437accc..982d15a06fa 100644 --- a/homeassistant/components/heos/strings.json +++ b/homeassistant/components/heos/strings.json @@ -86,6 +86,20 @@ } } }, + "get_queue": { + "name": "Get queue", + "description": "Retrieves the queue of the media player." + }, + "remove_from_queue": { + "name": "Remove from queue", + "description": "Removes items from the play queue.", + "fields": { + "queue_ids": { + "name": "Queue IDs", + "description": "The IDs (indexes) of the items in the queue to remove." + } + } + }, "group_volume_down": { "name": "Turn down group volume", "description": "Turns down the group volume." diff --git a/homeassistant/components/hko/config_flow.py b/homeassistant/components/hko/config_flow.py index 8548bb4767d..1e2a6230455 100644 --- a/homeassistant/components/hko/config_flow.py +++ b/homeassistant/components/hko/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from asyncio import timeout +import logging from typing import Any from hko import HKO, LOCATIONS, HKOError @@ -15,6 +16,8 @@ from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig from .const import API_RHRREAD, DEFAULT_LOCATION, DOMAIN, KEY_LOCATION +_LOGGER = logging.getLogger(__name__) + def get_loc_name(item): """Return an array of supported locations.""" @@ -54,7 +57,8 @@ class HKOConfigFlow(ConfigFlow, domain=DOMAIN): except HKOError: errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: await self.async_set_unique_id( diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index ec47b222370..4c73210c36e 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.68", "babel==2.15.0"] + "requirements": ["holidays==0.69", "babel==2.15.0"] } diff --git a/homeassistant/components/holiday/strings.json b/homeassistant/components/holiday/strings.json index d464f9e8bfd..6e317b8fa7b 100644 --- a/homeassistant/components/holiday/strings.json +++ b/homeassistant/components/holiday/strings.json @@ -8,7 +8,7 @@ "step": { "user": { "data": { - "country": "Country" + "country": "[%key:common::config_flow::data::country%]" } }, "options": { diff --git a/homeassistant/components/home_connect/binary_sensor.py b/homeassistant/components/home_connect/binary_sensor.py index b7b7e50047e..a28b4ff2b49 100644 --- a/homeassistant/components/home_connect/binary_sensor.py +++ b/homeassistant/components/home_connect/binary_sensor.py @@ -244,6 +244,7 @@ class HomeConnectDoorBinarySensor(HomeConnectBinarySensor): BSH_DOOR_STATE_LOCKED: False, BSH_DOOR_STATE_OPEN: True, }, + 
entity_registry_enabled_default=False, ), ) self._attr_unique_id = f"{appliance.info.ha_id}-Door" @@ -283,7 +284,8 @@ class HomeConnectDoorBinarySensor(HomeConnectBinarySensor): DOMAIN, f"deprecated_binary_common_door_sensor_{self.entity_id}", breaks_in_ha_version="2025.5.0", - is_fixable=False, + is_fixable=True, + is_persistent=True, severity=IssueSeverity.WARNING, translation_key="deprecated_binary_common_door_sensor", translation_placeholders={ diff --git a/homeassistant/components/home_connect/coordinator.py b/homeassistant/components/home_connect/coordinator.py index 495b4efab32..5e24ed25abd 100644 --- a/homeassistant/components/home_connect/coordinator.py +++ b/homeassistant/components/home_connect/coordinator.py @@ -5,6 +5,7 @@ from __future__ import annotations from asyncio import sleep as asyncio_sleep from collections import defaultdict from collections.abc import Callable +from contextlib import suppress from dataclasses import dataclass import logging from typing import Any, cast @@ -119,8 +120,11 @@ class HomeConnectCoordinator( self.__dict__.pop("context_listeners", None) def remove_listener_and_invalidate_context_listeners() -> None: - remove_listener() - self.__dict__.pop("context_listeners", None) + # There are cases where the remove_listener will be called + # although it has been already removed somewhere else + with suppress(KeyError): + remove_listener() + self.__dict__.pop("context_listeners", None) return remove_listener_and_invalidate_context_listeners @@ -155,7 +159,7 @@ class HomeConnectCoordinator( f"home_connect-events_listener_task-{self.config_entry.entry_id}", ) - async def _event_listener(self) -> None: # noqa: C901 + async def _event_listener(self) -> None: """Match event with listener for event type.""" retry_time = 10 while True: @@ -279,13 +283,6 @@ class HomeConnectCoordinator( ) break - # Trigger to delete the possible depaired device entities - # from known_entities variable at common.py - for listener, context in self._special_listeners.values(): - assert isinstance(context, tuple) - if EventKey.BSH_COMMON_APPLIANCE_DEPAIRED in context: - listener() - @callback def _call_event_listener(self, event_message: EventMessage) -> None: """Call listener for event.""" @@ -389,6 +386,13 @@ class HomeConnectCoordinator( remove_config_entry_id=self.config_entry.entry_id, ) + # Trigger to delete the possible depaired device entities + # from known_entities variable at common.py + for listener, context in self._special_listeners.values(): + assert isinstance(context, tuple) + if EventKey.BSH_COMMON_APPLIANCE_DEPAIRED in context: + listener() + async def _get_appliance_data( self, appliance: HomeAppliance, diff --git a/homeassistant/components/home_connect/light.py b/homeassistant/components/home_connect/light.py index 707620f099a..de55a60bd43 100644 --- a/homeassistant/components/home_connect/light.py +++ b/homeassistant/components/home_connect/light.py @@ -207,11 +207,13 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): brightness = round( color_util.brightness_to_value( self._brightness_scale, - kwargs.get(ATTR_BRIGHTNESS, self._attr_brightness), + cast(int, kwargs.get(ATTR_BRIGHTNESS, self._attr_brightness)), ) ) - hs_color = kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) + hs_color = cast( + tuple[float, float], kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) + ) rgb = color_util.color_hsv_to_RGB(hs_color[0], hs_color[1], brightness) hex_val = color_util.color_rgb_to_hex(*rgb) diff --git a/homeassistant/components/home_connect/number.py 
b/homeassistant/components/home_connect/number.py index 99fe6c17296..1bb793f4015 100644 --- a/homeassistant/components/home_connect/number.py +++ b/homeassistant/components/home_connect/number.py @@ -1,4 +1,4 @@ -"""Provides number enties for Home Connect.""" +"""Provides number entities for Home Connect.""" import logging from typing import cast @@ -26,6 +26,11 @@ _LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 1 NUMBERS = ( + NumberEntityDescription( + key=SettingKey.BSH_COMMON_ALARM_CLOCK, + device_class=NumberDeviceClass.DURATION, + translation_key="alarm_clock", + ), NumberEntityDescription( key=SettingKey.REFRIGERATION_FRIDGE_FREEZER_SETPOINT_TEMPERATURE_REFRIGERATOR, device_class=NumberDeviceClass.TEMPERATURE, diff --git a/homeassistant/components/home_connect/sensor.py b/homeassistant/components/home_connect/sensor.py index 796af8260fc..0f0161971a2 100644 --- a/homeassistant/components/home_connect/sensor.py +++ b/homeassistant/components/home_connect/sensor.py @@ -1,7 +1,10 @@ """Provides a sensor for Home Connect.""" +from collections import defaultdict +from collections.abc import Callable from dataclasses import dataclass from datetime import timedelta +from functools import partial import logging from typing import cast @@ -14,7 +17,7 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfVolume -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.util import dt as dt_util, slugify @@ -42,7 +45,6 @@ class HomeConnectSensorEntityDescription( ): """Entity Description class for sensors.""" - default_value: str | None = None appliance_types: tuple[str, ...] 
| None = None fetch_unit: bool = False @@ -53,7 +55,7 @@ BSH_PROGRAM_SENSORS = ( device_class=SensorDeviceClass.TIMESTAMP, translation_key="program_finish_time", appliance_types=( - "CoffeMaker", + "CoffeeMaker", "CookProcessor", "Dishwasher", "Dryer", @@ -195,58 +197,59 @@ SENSORS = ( EVENT_SENSORS = ( HomeConnectSensorEntityDescription( - key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_FREEZER, + key=EventKey.BSH_COMMON_EVENT_PROGRAM_ABORTED, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, - default_value="off", - translation_key="freezer_door_alarm", - appliance_types=("FridgeFreezer", "Freezer"), + translation_key="program_aborted", + appliance_types=("Dishwasher", "CleaningRobot", "CookProcessor"), ), HomeConnectSensorEntityDescription( - key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_REFRIGERATOR, + key=EventKey.BSH_COMMON_EVENT_PROGRAM_FINISHED, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, - default_value="off", - translation_key="refrigerator_door_alarm", - appliance_types=("FridgeFreezer", "Refrigerator"), + translation_key="program_finished", + appliance_types=( + "Oven", + "Dishwasher", + "Washer", + "Dryer", + "WasherDryer", + "CleaningRobot", + "CookProcessor", + ), ), HomeConnectSensorEntityDescription( - key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_TEMPERATURE_ALARM_FREEZER, + key=EventKey.BSH_COMMON_EVENT_ALARM_CLOCK_ELAPSED, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, - default_value="off", - translation_key="freezer_temperature_alarm", - appliance_types=("FridgeFreezer", "Freezer"), + translation_key="alarm_clock_elapsed", + appliance_types=("Oven", "Cooktop"), ), HomeConnectSensorEntityDescription( - key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_BEAN_CONTAINER_EMPTY, + key=EventKey.COOKING_OVEN_EVENT_PREHEAT_FINISHED, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, - default_value="off", - translation_key="bean_container_empty", - appliance_types=("CoffeeMaker",), + translation_key="preheat_finished", + appliance_types=("Oven", "Cooktop"), ), HomeConnectSensorEntityDescription( - key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_WATER_TANK_EMPTY, + key=EventKey.COOKING_OVEN_EVENT_REGULAR_PREHEAT_FINISHED, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, - default_value="off", - translation_key="water_tank_empty", - appliance_types=("CoffeeMaker",), + translation_key="regular_preheat_finished", + appliance_types=("Oven",), ), HomeConnectSensorEntityDescription( - key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DRIP_TRAY_FULL, + key=EventKey.LAUNDRY_CARE_DRYER_EVENT_DRYING_PROCESS_FINISHED, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, - default_value="off", - translation_key="drip_tray_full", - appliance_types=("CoffeeMaker",), + translation_key="drying_process_finished", + appliance_types=("Dryer",), ), HomeConnectSensorEntityDescription( key=EventKey.DISHCARE_DISHWASHER_EVENT_SALT_NEARLY_EMPTY, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, - default_value="off", translation_key="salt_nearly_empty", appliance_types=("Dishwasher",), ), @@ -254,10 +257,219 @@ EVENT_SENSORS = ( key=EventKey.DISHCARE_DISHWASHER_EVENT_RINSE_AID_NEARLY_EMPTY, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, - default_value="off", translation_key="rinse_aid_nearly_empty", appliance_types=("Dishwasher",), ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_BEAN_CONTAINER_EMPTY, + device_class=SensorDeviceClass.ENUM, + 
options=EVENT_OPTIONS, + translation_key="bean_container_empty", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_WATER_TANK_EMPTY, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="water_tank_empty", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DRIP_TRAY_FULL, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="drip_tray_full", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_KEEP_MILK_TANK_COOL, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="keep_milk_tank_cool", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DESCALING_IN_20_CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="descaling_in_20_cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DESCALING_IN_15_CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="descaling_in_15_cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DESCALING_IN_10_CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="descaling_in_10_cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DESCALING_IN_5_CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="descaling_in_5_cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_SHOULD_BE_DESCALED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="device_should_be_descaled", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_DESCALING_OVERDUE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="device_descaling_overdue", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_DESCALING_BLOCKAGE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="device_descaling_blockage", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_SHOULD_BE_CLEANED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="device_should_be_cleaned", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_CLEANING_OVERDUE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="device_cleaning_overdue", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_CALC_N_CLEAN_IN20CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="calc_n_clean_in20cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + 
key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_CALC_N_CLEAN_IN15CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="calc_n_clean_in15cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_CALC_N_CLEAN_IN10CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="calc_n_clean_in10cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_CALC_N_CLEAN_IN5CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="calc_n_clean_in5cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_SHOULD_BE_CALC_N_CLEANED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="device_should_be_calc_n_cleaned", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_CALC_N_CLEAN_OVERDUE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="device_calc_n_clean_overdue", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_CALC_N_CLEAN_BLOCKAGE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="device_calc_n_clean_blockage", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_FREEZER, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="freezer_door_alarm", + appliance_types=("FridgeFreezer", "Freezer"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_REFRIGERATOR, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="refrigerator_door_alarm", + appliance_types=("FridgeFreezer", "Refrigerator"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_TEMPERATURE_ALARM_FREEZER, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="freezer_temperature_alarm", + appliance_types=("FridgeFreezer", "Freezer"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_EVENT_EMPTY_DUST_BOX_AND_CLEAN_FILTER, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="empty_dust_box_and_clean_filter", + appliance_types=("CleaningRobot",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_EVENT_ROBOT_IS_STUCK, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="robot_is_stuck", + appliance_types=("CleaningRobot",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_EVENT_DOCKING_STATION_NOT_FOUND, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="docking_station_not_found", + appliance_types=("CleaningRobot",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.LAUNDRY_CARE_WASHER_EVENT_I_DOS_1_FILL_LEVEL_POOR, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="poor_i_dos_1_fill_level", + appliance_types=("Washer", "WasherDryer"), + ), + HomeConnectSensorEntityDescription( + 
key=EventKey.LAUNDRY_CARE_WASHER_EVENT_I_DOS_2_FILL_LEVEL_POOR, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="poor_i_dos_2_fill_level", + appliance_types=("Washer", "WasherDryer"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.COOKING_COMMON_EVENT_HOOD_GREASE_FILTER_MAX_SATURATION_NEARLY_REACHED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="grease_filter_max_saturation_nearly_reached", + appliance_types=("Hood",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.COOKING_COMMON_EVENT_HOOD_GREASE_FILTER_MAX_SATURATION_REACHED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + translation_key="grease_filter_max_saturation_reached", + appliance_types=("Hood",), + ), ) @@ -267,12 +479,6 @@ def _get_entities_for_appliance( ) -> list[HomeConnectEntity]: """Get a list of entities.""" return [ - *[ - HomeConnectEventSensor(entry.runtime_data, appliance, description) - for description in EVENT_SENSORS - if description.appliance_types - and appliance.info.type in description.appliance_types - ], *[ HomeConnectProgramSensor(entry.runtime_data, appliance, desc) for desc in BSH_PROGRAM_SENSORS @@ -286,6 +492,72 @@ def _get_entities_for_appliance( ] +def _add_event_sensor_entity( + entry: HomeConnectConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, + appliance: HomeConnectApplianceData, + description: HomeConnectSensorEntityDescription, + remove_event_sensor_listener_list: list[Callable[[], None]], +) -> None: + """Add an event sensor entity.""" + if ( + (appliance_data := entry.runtime_data.data.get(appliance.info.ha_id)) is None + ) or description.key not in appliance_data.events: + return + + for remove_listener in remove_event_sensor_listener_list: + remove_listener() + async_add_entities( + [ + HomeConnectEventSensor(entry.runtime_data, appliance, description), + ] + ) + + +def _add_event_sensor_listeners( + entry: HomeConnectConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, + remove_event_sensor_listener_dict: dict[str, list[CALLBACK_TYPE]], +) -> None: + for appliance in entry.runtime_data.data.values(): + if appliance.info.ha_id in remove_event_sensor_listener_dict: + continue + for event_sensor_description in EVENT_SENSORS: + if appliance.info.type not in cast( + tuple[str, ...], event_sensor_description.appliance_types + ): + continue + # We use a list as a kind of lazy initializer, as we can use the + # remove_listener while we are initializing it. 
+ remove_event_sensor_listener_list = remove_event_sensor_listener_dict[ + appliance.info.ha_id + ] + remove_listener = entry.runtime_data.async_add_listener( + partial( + _add_event_sensor_entity, + entry, + async_add_entities, + appliance, + event_sensor_description, + remove_event_sensor_listener_list, + ), + (appliance.info.ha_id, event_sensor_description.key), + ) + remove_event_sensor_listener_list.append(remove_listener) + entry.async_on_unload(remove_listener) + + +def _remove_event_sensor_listeners_on_depaired( + entry: HomeConnectConfigEntry, + remove_event_sensor_listener_dict: dict[str, list[CALLBACK_TYPE]], +) -> None: + registered_listeners_ha_id = set(remove_event_sensor_listener_dict) + actual_appliances = set(entry.runtime_data.data) + for appliance_ha_id in registered_listeners_ha_id - actual_appliances: + for listener in remove_event_sensor_listener_dict.pop(appliance_ha_id): + listener() + + async def async_setup_entry( hass: HomeAssistant, entry: HomeConnectConfigEntry, @@ -298,6 +570,32 @@ async def async_setup_entry( async_add_entities, ) + remove_event_sensor_listener_dict: dict[str, list[CALLBACK_TYPE]] = defaultdict( + list + ) + + entry.async_on_unload( + entry.runtime_data.async_add_special_listener( + partial( + _add_event_sensor_listeners, + entry, + async_add_entities, + remove_event_sensor_listener_dict, + ), + (EventKey.BSH_COMMON_APPLIANCE_PAIRED,), + ) + ) + entry.async_on_unload( + entry.runtime_data.async_add_special_listener( + partial( + _remove_event_sensor_listeners_on_depaired, + entry, + remove_event_sensor_listener_dict, + ), + (EventKey.BSH_COMMON_APPLIANCE_DEPAIRED,), + ) + ) + class HomeConnectSensor(HomeConnectEntity, SensorEntity): """Sensor class for Home Connect.""" @@ -402,8 +700,5 @@ class HomeConnectEventSensor(HomeConnectSensor): def update_native_value(self) -> None: """Update the sensor's status.""" - event = self.appliance.events.get(cast(EventKey, self.bsh_key)) - if event: - self._update_native_value(event.value) - elif not self._attr_native_value: - self._attr_native_value = self.entity_description.default_value + event = self.appliance.events[cast(EventKey, self.bsh_key)] + self._update_native_value(event.value) diff --git a/homeassistant/components/home_connect/services.yaml b/homeassistant/components/home_connect/services.yaml index 2b53090fd34..e07e8e91457 100644 --- a/homeassistant/components/home_connect/services.yaml +++ b/homeassistant/components/home_connect/services.yaml @@ -64,7 +64,6 @@ set_program_and_options: - selected_program program: example: dishcare_dishwasher_program_auto2 - required: true selector: select: mode: dropdown diff --git a/homeassistant/components/home_connect/strings.json b/homeassistant/components/home_connect/strings.json index 00ab29affd8..ad7f67968f5 100644 --- a/homeassistant/components/home_connect/strings.json +++ b/homeassistant/components/home_connect/strings.json @@ -110,17 +110,71 @@ } }, "issues": { + "deprecated_time_alarm_clock_in_automations_scripts": { + "title": "Deprecated alarm clock entity detected in some automations or scripts", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::home_connect::issues::deprecated_time_alarm_clock_in_automations_scripts::title%]", + "description": "The alarm clock entity `{entity_id}`, which is deprecated because it's being moved to the `number` platform, is used in the following automations or scripts:\n{items}\n\nPlease, fix this issue by updating your automations or scripts to use the new `number` entity." 
+ } + } + } + }, + "deprecated_time_alarm_clock": { + "title": "Deprecated alarm clock entity", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::home_connect::issues::deprecated_time_alarm_clock::title%]", + "description": "The alarm clock entity `{entity_id}` is deprecated because it's being moved to the `number` platform.\n\nPlease use the new `number` entity." + } + } + } + }, "deprecated_binary_common_door_sensor": { "title": "Deprecated binary door sensor detected in some automations or scripts", - "description": "The binary door sensor `{entity}`, which is deprecated, is used in the following automations or scripts:\n{items}\n\nA sensor entity with additional possible states is available and should be used going forward; Please use it on the above automations or scripts to fix this issue." + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::home_connect::issues::deprecated_binary_common_door_sensor::title%]", + "description": "The binary door sensor `{entity}`, which is deprecated, is used in the following automations or scripts:\n{items}\n\nA sensor entity with additional possible states is available and should be used going forward; Please use it on the above automations or scripts to fix this issue." + } + } + } }, "deprecated_command_actions": { "title": "The command related actions are deprecated in favor of the new buttons", - "description": "The `pause_program` and `resume_program` actions have been deprecated in favor of new button entities, if the command is available for your appliance. Please update your automations, scripts and panels that use this action to use the button entities instead, and press on submit to fix the issue." + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::home_connect::issues::deprecated_command_actions::title%]", + "description": "The `pause_program` and `resume_program` actions have been deprecated in favor of new button entities, if the command is available for your appliance. Please update your automations, scripts and panels that use this action to use the button entities instead, and press on submit to fix the issue." + } + } + } + }, + "deprecated_program_switch_in_automations_scripts": { + "title": "Deprecated program switch detected in some automations or scripts", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::home_connect::issues::deprecated_program_switch_in_automations_scripts::title%]", + "description": "Program switches are deprecated and {entity_id} is used in the following automations or scripts:\n{items}\n\nYou can use the active program select entity to run the program without any additional options and get the current running program on the above automations or scripts to fix this issue." + } + } + } }, "deprecated_program_switch": { - "title": "Deprecated program switch detected in some automations or scripts", - "description": "Program switches are deprecated and {entity_id} is used in the following automations or scripts:\n{items}\n\nYou can use the active program select entity to run the program without any additional options and get the current running program on the above automations or scripts to fix this issue." 
+ "title": "Deprecated program switch entities", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::home_connect::issues::deprecated_program_switch::title%]", + "description": "The switch entity `{entity_id}` and all the other program switches are deprecated.\n\nPlease use the active program select entity instead." + } + } + } }, "deprecated_set_program_and_option_actions": { "title": "The executed action is deprecated", @@ -457,7 +511,7 @@ }, "spin_speed": { "options": { - "laundry_care_washer_enum_type_spin_speed_off": "Off", + "laundry_care_washer_enum_type_spin_speed_off": "[%key:common::state::off%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_400": "400 rpm", "laundry_care_washer_enum_type_spin_speed_r_p_m_600": "600 rpm", "laundry_care_washer_enum_type_spin_speed_r_p_m_700": "700 rpm", @@ -467,7 +521,7 @@ "laundry_care_washer_enum_type_spin_speed_r_p_m_1200": "1200 rpm", "laundry_care_washer_enum_type_spin_speed_r_p_m_1400": "1400 rpm", "laundry_care_washer_enum_type_spin_speed_r_p_m_1600": "1600 rpm", - "laundry_care_washer_enum_type_spin_speed_ul_off": "Off", + "laundry_care_washer_enum_type_spin_speed_ul_off": "[%key:common::state::off%]", "laundry_care_washer_enum_type_spin_speed_ul_low": "Low", "laundry_care_washer_enum_type_spin_speed_ul_medium": "Medium", "laundry_care_washer_enum_type_spin_speed_ul_high": "High" @@ -475,7 +529,7 @@ }, "vario_perfect": { "options": { - "laundry_care_common_enum_type_vario_perfect_off": "Off", + "laundry_care_common_enum_type_vario_perfect_off": "[%key:common::state::off%]", "laundry_care_common_enum_type_vario_perfect_eco_perfect": "Eco perfect", "laundry_care_common_enum_type_vario_perfect_speed_perfect": "Speed perfect" } @@ -868,6 +922,9 @@ } }, "number": { + "alarm_clock": { + "name": "Alarm clock" + }, "refrigerator_setpoint_temperature": { "name": "Refrigerator temperature" }, @@ -1437,7 +1494,7 @@ "spin_speed": { "name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_spin_speed::name%]", "state": { - "laundry_care_washer_enum_type_spin_speed_off": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_off%]", + "laundry_care_washer_enum_type_spin_speed_off": "[%key:common::state::off%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_400": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_400%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_600": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_600%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_700": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_700%]", @@ -1447,7 +1504,7 @@ "laundry_care_washer_enum_type_spin_speed_r_p_m_1200": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_1200%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_1400": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_1400%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_1600": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_1600%]", - "laundry_care_washer_enum_type_spin_speed_ul_off": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_ul_off%]", + 
"laundry_care_washer_enum_type_spin_speed_ul_off": "[%key:common::state::off%]", "laundry_care_washer_enum_type_spin_speed_ul_low": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_ul_low%]", "laundry_care_washer_enum_type_spin_speed_ul_medium": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_ul_medium%]", "laundry_care_washer_enum_type_spin_speed_ul_high": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_ul_high%]" @@ -1456,7 +1513,7 @@ "vario_perfect": { "name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_vario_perfect::name%]", "state": { - "laundry_care_common_enum_type_vario_perfect_off": "[%key:component::home_connect::selector::vario_perfect::options::laundry_care_common_enum_type_vario_perfect_off%]", + "laundry_care_common_enum_type_vario_perfect_off": "[%key:common::state::off%]", "laundry_care_common_enum_type_vario_perfect_eco_perfect": "[%key:component::home_connect::selector::vario_perfect::options::laundry_care_common_enum_type_vario_perfect_eco_perfect%]", "laundry_care_common_enum_type_vario_perfect_speed_perfect": "[%key:component::home_connect::selector::vario_perfect::options::laundry_care_common_enum_type_vario_perfect_speed_perfect%]" } @@ -1545,23 +1602,64 @@ "oven_current_cavity_temperature": { "name": "Current oven cavity temperature" }, - "freezer_door_alarm": { - "name": "Freezer door alarm", - "state": { - "confirmed": "[%key:component::home_connect::common::confirmed%]", - "present": "[%key:component::home_connect::common::present%]" - } - }, - "refrigerator_door_alarm": { - "name": "Refrigerator door alarm", + "program_aborted": { + "name": "Program aborted", "state": { "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", "present": "[%key:component::home_connect::common::present%]" } }, - "freezer_temperature_alarm": { - "name": "Freezer temperature alarm", + "program_finished": { + "name": "Program finished", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "alarm_clock_elapsed": { + "name": "Alarm clock elapsed", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "preheat_finished": { + "name": "Pre-heat finished", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "regular_preheat_finished": { + "name": "Regular pre-heat finished", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "drying_process_finished": { + "name": "Drying process finished", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "salt_nearly_empty": { + "name": "Salt nearly empty", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + 
"present": "[%key:component::home_connect::common::present%]" + } + }, + "rinse_aid_nearly_empty": { + "name": "Rinse aid nearly empty", "state": { "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", @@ -1592,16 +1690,216 @@ "present": "[%key:component::home_connect::common::present%]" } }, - "salt_nearly_empty": { - "name": "Salt nearly empty", + "keep_milk_tank_cool": { + "name": "Keep milk tank cool", "state": { "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", "present": "[%key:component::home_connect::common::present%]" } }, - "rinse_aid_nearly_empty": { - "name": "Rinse aid nearly empty", + "descaling_in_20_cups": { + "name": "Descaling in 20 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "descaling_in_15_cups": { + "name": "Descaling in 15 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "descaling_in_10_cups": { + "name": "Descaling in 10 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "descaling_in_5_cups": { + "name": "Descaling in 5 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_should_be_descaled": { + "name": "Device should be descaled", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_descaling_overdue": { + "name": "Device descaling overdue", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_descaling_blockage": { + "name": "Device descaling blockage", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_should_be_cleaned": { + "name": "Device should be cleaned", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_cleaning_overdue": { + "name": "Device cleaning overdue", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "calc_n_clean_in20cups": { + "name": "Calc'N'Clean in 20 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "calc_n_clean_in15cups": { + "name": "Calc'N'Clean in 15 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": 
"[%key:component::home_connect::common::present%]" + } + }, + "calc_n_clean_in10cups": { + "name": "Calc'N'Clean in 10 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "calc_n_clean_in5cups": { + "name": "Calc'N'Clean in 5 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_should_be_calc_n_cleaned": { + "name": "Device should be Calc'N'Cleaned", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_calc_n_clean_overdue": { + "name": "Device Calc'N'Clean overdue", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_calc_n_clean_blockage": { + "name": "Device Calc'N'Clean blockage", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "freezer_door_alarm": { + "name": "Freezer door alarm", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "refrigerator_door_alarm": { + "name": "Refrigerator door alarm", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "freezer_temperature_alarm": { + "name": "Freezer temperature alarm", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "empty_dust_box_and_clean_filter": { + "name": "Empty dust box and clean filter", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "robot_is_stuck": { + "name": "Robot is stuck", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "docking_station_not_found": { + "name": "Docking station not found", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "poor_i_dos_1_fill_level": { + "name": "Poor i-Dos 1 fill level", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "poor_i_dos_2_fill_level": { + "name": "Poor i-Dos 2 fill level", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "grease_filter_max_saturation_nearly_reached": { + "name": 
"Grease filter max saturation nearly reached", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "grease_filter_max_saturation_reached": { + "name": "Grease filter max saturation reached", "state": { "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", diff --git a/homeassistant/components/home_connect/switch.py b/homeassistant/components/home_connect/switch.py index 33e30f184b7..05f0ed2ddc3 100644 --- a/homeassistant/components/home_connect/switch.py +++ b/homeassistant/components/home_connect/switch.py @@ -266,7 +266,10 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): super().__init__( coordinator, appliance, - SwitchEntityDescription(key=EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM), + SwitchEntityDescription( + key=EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM, + entity_registry_enabled_default=False, + ), ) self._attr_name = f"{appliance.info.name} {desc}" self._attr_unique_id = f"{appliance.info.ha_id}-{desc}" @@ -304,11 +307,12 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): async_create_issue( self.hass, DOMAIN, - f"deprecated_program_switch_{self.entity_id}", + f"deprecated_program_switch_in_automations_scripts_{self.entity_id}", breaks_in_ha_version="2025.6.0", - is_fixable=False, + is_fixable=True, + is_persistent=True, severity=IssueSeverity.WARNING, - translation_key="deprecated_program_switch", + translation_key="deprecated_program_switch_in_automations_scripts", translation_placeholders={ "entity_id": self.entity_id, "items": "\n".join(items_list), @@ -317,12 +321,34 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): async def async_will_remove_from_hass(self) -> None: """Call when entity will be removed from hass.""" + async_delete_issue( + self.hass, + DOMAIN, + f"deprecated_program_switch_in_automations_scripts_{self.entity_id}", + ) async_delete_issue( self.hass, DOMAIN, f"deprecated_program_switch_{self.entity_id}" ) + def create_action_handler_issue(self) -> None: + """Create deprecation issue.""" + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_program_switch_{self.entity_id}", + breaks_in_ha_version="2025.6.0", + is_fixable=True, + is_persistent=True, + severity=IssueSeverity.WARNING, + translation_key="deprecated_program_switch", + translation_placeholders={ + "entity_id": self.entity_id, + }, + ) + async def async_turn_on(self, **kwargs: Any) -> None: """Start the program.""" + self.create_action_handler_issue() try: await self.coordinator.client.start_program( self.appliance.info.ha_id, program_key=self.program.key @@ -339,6 +365,7 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Stop the program.""" + self.create_action_handler_issue() try: await self.coordinator.client.stop_program(self.appliance.info.ha_id) except HomeConnectError as err: diff --git a/homeassistant/components/home_connect/time.py b/homeassistant/components/home_connect/time.py index 7cfa0a7d3e4..adf26d2d973 100644 --- a/homeassistant/components/home_connect/time.py +++ b/homeassistant/components/home_connect/time.py @@ -1,4 +1,4 @@ -"""Provides time enties for Home Connect.""" +"""Provides time entities for Home Connect.""" from datetime import time from typing import cast @@ -6,10 +6,18 @@ from typing import cast from aiohomeconnect.model import SettingKey from 
aiohomeconnect.model.error import HomeConnectError +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity from homeassistant.components.time import TimeEntity, TimeEntityDescription from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from .common import setup_home_connect_entry from .const import DOMAIN @@ -23,6 +31,7 @@ TIME_ENTITIES = ( TimeEntityDescription( key=SettingKey.BSH_COMMON_ALARM_CLOCK, translation_key="alarm_clock", + entity_registry_enabled_default=False, ), ) @@ -67,8 +76,78 @@ def time_to_seconds(t: time) -> int: class HomeConnectTimeEntity(HomeConnectEntity, TimeEntity): """Time setting class for Home Connect.""" + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + await super().async_added_to_hass() + if self.bsh_key == SettingKey.BSH_COMMON_ALARM_CLOCK: + automations = automations_with_entity(self.hass, self.entity_id) + scripts = scripts_with_entity(self.hass, self.entity_id) + items = automations + scripts + if not items: + return + + entity_reg: er.EntityRegistry = er.async_get(self.hass) + entity_automations = [ + automation_entity + for automation_id in automations + if (automation_entity := entity_reg.async_get(automation_id)) + ] + entity_scripts = [ + script_entity + for script_id in scripts + if (script_entity := entity_reg.async_get(script_id)) + ] + + items_list = [ + f"- [{item.original_name}](/config/automation/edit/{item.unique_id})" + for item in entity_automations + ] + [ + f"- [{item.original_name}](/config/script/edit/{item.unique_id})" + for item in entity_scripts + ] + + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_time_alarm_clock_in_automations_scripts_{self.entity_id}", + breaks_in_ha_version="2025.10.0", + is_fixable=True, + is_persistent=True, + severity=IssueSeverity.WARNING, + translation_key="deprecated_time_alarm_clock", + translation_placeholders={ + "entity_id": self.entity_id, + "items": "\n".join(items_list), + }, + ) + + async def async_will_remove_from_hass(self) -> None: + """Call when entity will be removed from hass.""" + if self.bsh_key == SettingKey.BSH_COMMON_ALARM_CLOCK: + async_delete_issue( + self.hass, + DOMAIN, + f"deprecated_time_alarm_clock_in_automations_scripts_{self.entity_id}", + ) + async_delete_issue( + self.hass, DOMAIN, f"deprecated_time_alarm_clock_{self.entity_id}" + ) + async def async_set_value(self, value: time) -> None: """Set the native value of the entity.""" + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_time_alarm_clock_{self.entity_id}", + breaks_in_ha_version="2025.10.0", + is_fixable=True, + is_persistent=True, + severity=IssueSeverity.WARNING, + translation_key="deprecated_time_alarm_clock", + translation_placeholders={ + "entity_id": self.entity_id, + }, + ) try: await self.coordinator.client.set_setting( self.appliance.info.ha_id, diff --git a/homeassistant/components/homeassistant_hardware/coordinator.py b/homeassistant/components/homeassistant_hardware/coordinator.py index 9eb900b13fd..c9a5c891328 100644 --- a/homeassistant/components/homeassistant_hardware/coordinator.py +++ b/homeassistant/components/homeassistant_hardware/coordinator.py @@ -31,7 +31,6 
@@ class FirmwareUpdateCoordinator(DataUpdateCoordinator[FirmwareManifest]): _LOGGER, name="firmware update coordinator", update_interval=FIRMWARE_REFRESH_INTERVAL, - always_update=False, ) self.hass = hass self.session = session diff --git a/homeassistant/components/homeassistant_hardware/update.py b/homeassistant/components/homeassistant_hardware/update.py index e835286238f..960facc81f8 100644 --- a/homeassistant/components/homeassistant_hardware/update.py +++ b/homeassistant/components/homeassistant_hardware/update.py @@ -199,7 +199,7 @@ class BaseFirmwareUpdateEntity( # This entity is not currently associated with a device so we must manually # give it a name self._attr_name = f"{self._config_entry.title} Update" - self._attr_title = self.entity_description.firmware_name or "unknown" + self._attr_title = self.entity_description.firmware_name or "Unknown" if ( self._current_firmware_info is None diff --git a/homeassistant/components/homeassistant_sky_connect/__init__.py b/homeassistant/components/homeassistant_sky_connect/__init__.py index b3af47df61d..e8b8c3bb433 100644 --- a/homeassistant/components/homeassistant_sky_connect/__init__.py +++ b/homeassistant/components/homeassistant_sky_connect/__init__.py @@ -15,14 +15,13 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a Home Assistant SkyConnect config entry.""" - await hass.config_entries.async_forward_entry_setups(entry, ["update"]) - return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" + await hass.config_entries.async_unload_platforms(entry, ["update"]) return True diff --git a/homeassistant/components/homeassistant_sky_connect/update.py b/homeassistant/components/homeassistant_sky_connect/update.py index 43e3f1ca255..5eaa1e220be 100644 --- a/homeassistant/components/homeassistant_sky_connect/update.py +++ b/homeassistant/components/homeassistant_sky_connect/update.py @@ -21,11 +21,20 @@ from homeassistant.components.update import UpdateDeviceClass from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from .const import FIRMWARE, FIRMWARE_VERSION, NABU_CASA_FIRMWARE_RELEASES_URL +from .const import ( + DOMAIN, + FIRMWARE, + FIRMWARE_VERSION, + NABU_CASA_FIRMWARE_RELEASES_URL, + PRODUCT, + SERIAL_NUMBER, + HardwareVariant, +) _LOGGER = logging.getLogger(__name__) @@ -42,7 +51,7 @@ FIRMWARE_ENTITY_DESCRIPTIONS: dict[ fw_type="skyconnect_zigbee_ncp", version_key="ezsp_version", expected_firmware_type=ApplicationType.EZSP, - firmware_name="EmberZNet", + firmware_name="EmberZNet Zigbee", ), ApplicationType.SPINEL: FirmwareUpdateEntityDescription( key="firmware", @@ -55,6 +64,28 @@ FIRMWARE_ENTITY_DESCRIPTIONS: dict[ expected_firmware_type=ApplicationType.SPINEL, firmware_name="OpenThread RCP", ), + ApplicationType.CPC: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw, + fw_type="skyconnect_multipan", + 
version_key="cpc_version", + expected_firmware_type=ApplicationType.CPC, + firmware_name="Multiprotocol", + ), + ApplicationType.GECKO_BOOTLOADER: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw, + fw_type=None, # We don't want to update the bootloader + version_key="gecko_bootloader_version", + expected_firmware_type=ApplicationType.GECKO_BOOTLOADER, + firmware_name="Gecko Bootloader", + ), None: FirmwareUpdateEntityDescription( key="firmware", display_precision=0, @@ -77,9 +108,16 @@ def _async_create_update_entity( ) -> FirmwareUpdateEntity: """Create an update entity that handles firmware type changes.""" firmware_type = config_entry.data[FIRMWARE] - entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[ - ApplicationType(firmware_type) if firmware_type is not None else None - ] + + try: + entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[ + ApplicationType(firmware_type) + ] + except (KeyError, ValueError): + _LOGGER.debug( + "Unknown firmware type %r, using default entity description", firmware_type + ) + entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[None] entity = FirmwareUpdateEntity( device=config_entry.data["device"], @@ -130,6 +168,7 @@ class FirmwareUpdateEntity(BaseFirmwareUpdateEntity): """SkyConnect firmware update entity.""" bootloader_reset_type = None + _attr_has_entity_name = True def __init__( self, @@ -141,8 +180,18 @@ class FirmwareUpdateEntity(BaseFirmwareUpdateEntity): """Initialize the SkyConnect firmware update entity.""" super().__init__(device, config_entry, update_coordinator, entity_description) - self._attr_unique_id = ( - f"{self._config_entry.data['serial_number']}_{self.entity_description.key}" + variant = HardwareVariant.from_usb_product_name( + self._config_entry.data[PRODUCT] + ) + serial_number = self._config_entry.data[SERIAL_NUMBER] + + self._attr_unique_id = f"{serial_number}_{self.entity_description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, serial_number)}, + name=f"{variant.full_name} ({serial_number[:8]})", + model=variant.full_name, + manufacturer="Nabu Casa", + serial_number=serial_number, ) # Use the cached firmware info if it exists @@ -155,6 +204,17 @@ class FirmwareUpdateEntity(BaseFirmwareUpdateEntity): source="homeassistant_sky_connect", ) + def _update_attributes(self) -> None: + """Recompute the attributes of the entity.""" + super()._update_attributes() + + assert self.device_entry is not None + device_registry = dr.async_get(self.hass) + device_registry.async_update_device( + device_id=self.device_entry.id, + sw_version=f"{self.entity_description.firmware_name} {self._attr_installed_version}", + ) + @callback def _firmware_info_callback(self, firmware_info: FirmwareInfo) -> None: """Handle updated firmware info being pushed by an integration.""" diff --git a/homeassistant/components/homeassistant_yellow/__init__.py b/homeassistant/components/homeassistant_yellow/__init__.py index 06f908ab61e..71aa8ef99b7 100644 --- a/homeassistant/components/homeassistant_yellow/__init__.py +++ b/homeassistant/components/homeassistant_yellow/__init__.py @@ -62,6 +62,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" + await hass.config_entries.async_unload_platforms(entry, ["update"]) return True diff --git 
a/homeassistant/components/homeassistant_yellow/const.py b/homeassistant/components/homeassistant_yellow/const.py index b98b1133d01..b8bf17391f9 100644 --- a/homeassistant/components/homeassistant_yellow/const.py +++ b/homeassistant/components/homeassistant_yellow/const.py @@ -2,8 +2,9 @@ DOMAIN = "homeassistant_yellow" -RADIO_MODEL = "Home Assistant Yellow" -RADIO_MANUFACTURER = "Nabu Casa" +MODEL = "Home Assistant Yellow" +MANUFACTURER = "Nabu Casa" + RADIO_DEVICE = "/dev/ttyAMA1" ZHA_HW_DISCOVERY_DATA = { diff --git a/homeassistant/components/homeassistant_yellow/strings.json b/homeassistant/components/homeassistant_yellow/strings.json index b089e483899..ddff5fd9b6d 100644 --- a/homeassistant/components/homeassistant_yellow/strings.json +++ b/homeassistant/components/homeassistant_yellow/strings.json @@ -149,5 +149,12 @@ "run_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::run_zigbee_flasher_addon%]", "uninstall_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::uninstall_zigbee_flasher_addon%]" } + }, + "entity": { + "update": { + "firmware": { + "name": "Radio firmware" + } + } } } diff --git a/homeassistant/components/homeassistant_yellow/update.py b/homeassistant/components/homeassistant_yellow/update.py index 88d4f2912d3..94989d5c6b6 100644 --- a/homeassistant/components/homeassistant_yellow/update.py +++ b/homeassistant/components/homeassistant_yellow/update.py @@ -21,13 +21,17 @@ from homeassistant.components.update import UpdateDeviceClass from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from .const import ( + DOMAIN, FIRMWARE, FIRMWARE_VERSION, + MANUFACTURER, + MODEL, NABU_CASA_FIRMWARE_RELEASES_URL, RADIO_DEVICE, ) @@ -39,7 +43,7 @@ FIRMWARE_ENTITY_DESCRIPTIONS: dict[ ApplicationType | None, FirmwareUpdateEntityDescription ] = { ApplicationType.EZSP: FirmwareUpdateEntityDescription( - key="firmware", + key="radio_firmware", display_precision=0, device_class=UpdateDeviceClass.FIRMWARE, entity_category=EntityCategory.CONFIG, @@ -47,10 +51,10 @@ FIRMWARE_ENTITY_DESCRIPTIONS: dict[ fw_type="yellow_zigbee_ncp", version_key="ezsp_version", expected_firmware_type=ApplicationType.EZSP, - firmware_name="EmberZNet", + firmware_name="EmberZNet Zigbee", ), ApplicationType.SPINEL: FirmwareUpdateEntityDescription( - key="firmware", + key="radio_firmware", display_precision=0, device_class=UpdateDeviceClass.FIRMWARE, entity_category=EntityCategory.CONFIG, @@ -60,12 +64,34 @@ FIRMWARE_ENTITY_DESCRIPTIONS: dict[ expected_firmware_type=ApplicationType.SPINEL, firmware_name="OpenThread RCP", ), - None: FirmwareUpdateEntityDescription( + ApplicationType.CPC: FirmwareUpdateEntityDescription( key="firmware", display_precision=0, device_class=UpdateDeviceClass.FIRMWARE, entity_category=EntityCategory.CONFIG, version_parser=lambda fw: fw, + fw_type="yellow_multipan", + version_key="cpc_version", + expected_firmware_type=ApplicationType.CPC, + firmware_name="Multiprotocol", + ), + ApplicationType.GECKO_BOOTLOADER: FirmwareUpdateEntityDescription( + 
key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw, + fw_type=None, # We don't want to update the bootloader + version_key="gecko_bootloader_version", + expected_firmware_type=ApplicationType.GECKO_BOOTLOADER, + firmware_name="Gecko Bootloader", + ), + None: FirmwareUpdateEntityDescription( + key="radio_firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw, fw_type=None, version_key=None, expected_firmware_type=None, @@ -82,9 +108,16 @@ def _async_create_update_entity( ) -> FirmwareUpdateEntity: """Create an update entity that handles firmware type changes.""" firmware_type = config_entry.data[FIRMWARE] - entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[ - ApplicationType(firmware_type) if firmware_type is not None else None - ] + + try: + entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[ + ApplicationType(firmware_type) + ] + except (KeyError, ValueError): + _LOGGER.debug( + "Unknown firmware type %r, using default entity description", firmware_type + ) + entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[None] entity = FirmwareUpdateEntity( device=RADIO_DEVICE, @@ -135,6 +168,7 @@ class FirmwareUpdateEntity(BaseFirmwareUpdateEntity): """Yellow firmware update entity.""" bootloader_reset_type = "yellow" # Triggers a GPIO reset + _attr_has_entity_name = True def __init__( self, @@ -145,8 +179,13 @@ class FirmwareUpdateEntity(BaseFirmwareUpdateEntity): ) -> None: """Initialize the Yellow firmware update entity.""" super().__init__(device, config_entry, update_coordinator, entity_description) - self._attr_unique_id = self.entity_description.key + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, "yellow")}, + name=MODEL, + model=MODEL, + manufacturer=MANUFACTURER, + ) # Use the cached firmware info if it exists if self._config_entry.data[FIRMWARE] is not None: @@ -158,6 +197,17 @@ class FirmwareUpdateEntity(BaseFirmwareUpdateEntity): source="homeassistant_yellow", ) + def _update_attributes(self) -> None: + """Recompute the attributes of the entity.""" + super()._update_attributes() + + assert self.device_entry is not None + device_registry = dr.async_get(self.hass) + device_registry.async_update_device( + device_id=self.device_entry.id, + sw_version=f"{self.entity_description.firmware_name} {self._attr_installed_version}", + ) + @callback def _firmware_info_callback(self, firmware_info: FirmwareInfo) -> None: """Handle updated firmware info being pushed by an integration.""" diff --git a/homeassistant/components/homee/__init__.py b/homeassistant/components/homee/__init__.py index 6158a699302..9fd88ee40aa 100644 --- a/homeassistant/components/homee/__init__.py +++ b/homeassistant/components/homee/__init__.py @@ -19,6 +19,7 @@ PLATFORMS = [ Platform.BUTTON, Platform.COVER, Platform.LIGHT, + Platform.LOCK, Platform.NUMBER, Platform.SELECT, Platform.SENSOR, diff --git a/homeassistant/components/homee/config_flow.py b/homeassistant/components/homee/config_flow.py index 61d2a3f25a5..1a3c5011f82 100644 --- a/homeassistant/components/homee/config_flow.py +++ b/homeassistant/components/homee/config_flow.py @@ -52,7 +52,7 @@ class HomeeConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except HomeeAuthenticationFailedException: errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") 
                errors["base"] = "unknown"
            else:
diff --git a/homeassistant/components/homee/lock.py b/homeassistant/components/homee/lock.py
new file mode 100644
index 00000000000..4cfc34e11fe
--- /dev/null
+++ b/homeassistant/components/homee/lock.py
@@ -0,0 +1,73 @@
+"""The Homee lock platform."""
+
+from typing import Any
+
+from pyHomee.const import AttributeChangedBy, AttributeType
+
+from homeassistant.components.lock import LockEntity
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from . import HomeeConfigEntry
+from .entity import HomeeEntity
+from .helpers import get_name_for_enum
+
+PARALLEL_UPDATES = 0
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: HomeeConfigEntry,
+    async_add_devices: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Add the Homee platform for the lock component."""
+
+    async_add_devices(
+        HomeeLock(attribute, config_entry)
+        for node in config_entry.runtime_data.nodes
+        for attribute in node.attributes
+        if (attribute.type == AttributeType.LOCK_STATE and attribute.editable)
+    )
+
+
+class HomeeLock(HomeeEntity, LockEntity):
+    """Representation of a Homee lock."""
+
+    _attr_name = None
+
+    @property
+    def is_locked(self) -> bool:
+        """Return if lock is locked."""
+        return self._attribute.current_value == 1.0
+
+    @property
+    def is_locking(self) -> bool:
+        """Return if lock is locking."""
+        return self._attribute.target_value > self._attribute.current_value
+
+    @property
+    def is_unlocking(self) -> bool:
+        """Return if lock is unlocking."""
+        return self._attribute.target_value < self._attribute.current_value
+
+    @property
+    def changed_by(self) -> str:
+        """Return by whom or what the lock was last changed."""
+        changed_id = str(self._attribute.changed_by_id)
+        changed_by_name = get_name_for_enum(
+            AttributeChangedBy, self._attribute.changed_by
+        )
+        if self._attribute.changed_by == AttributeChangedBy.USER:
+            changed_id = self._entry.runtime_data.get_user_by_id(
+                self._attribute.changed_by_id
+            ).username
+
+        return f"{changed_by_name}-{changed_id}"
+
+    async def async_lock(self, **kwargs: Any) -> None:
+        """Lock specified lock. A code to lock the lock with may be specified."""
+        await self.async_set_homee_value(1)
+
+    async def async_unlock(self, **kwargs: Any) -> None:
+        """Unlock specified lock. A code to unlock the lock with may be specified."""
+        await self.async_set_homee_value(0)
diff --git a/homeassistant/components/homee/strings.json b/homeassistant/components/homee/strings.json
index da8357d16bc..3dbbdcd2004 100644
--- a/homeassistant/components/homee/strings.json
+++ b/homeassistant/components/homee/strings.json
@@ -297,8 +297,8 @@
           "open": "[%key:common::state::open%]",
           "closed": "[%key:common::state::closed%]",
           "partial": "Partially open",
-          "opening": "Opening",
-          "closing": "Closing"
+          "opening": "[%key:common::state::opening%]",
+          "closing": "[%key:common::state::closing%]"
         }
       },
       "uv": {
diff --git a/homeassistant/components/homekit_controller/manifest.json b/homeassistant/components/homekit_controller/manifest.json
index 98db9a397d3..6562a3edcc9 100644
--- a/homeassistant/components/homekit_controller/manifest.json
+++ b/homeassistant/components/homekit_controller/manifest.json
@@ -14,6 +14,6 @@
   "documentation": "https://www.home-assistant.io/integrations/homekit_controller",
   "iot_class": "local_push",
   "loggers": ["aiohomekit", "commentjson"],
-  "requirements": ["aiohomekit==3.2.8"],
+  "requirements": ["aiohomekit==3.2.13"],
   "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
 }
diff --git a/homeassistant/components/homekit_controller/strings.json b/homeassistant/components/homekit_controller/strings.json
index d1205645fd3..dcbfae72fe3 100644
--- a/homeassistant/components/homekit_controller/strings.json
+++ b/homeassistant/components/homekit_controller/strings.json
@@ -141,7 +141,7 @@
       "air_purifier_state_current": {
         "state": {
           "inactive": "Inactive",
-          "idle": "Idle",
+          "idle": "[%key:common::state::idle%]",
           "purifying": "Purifying"
         }
       }
diff --git a/homeassistant/components/homematic/sensor.py b/homeassistant/components/homematic/sensor.py
index 24172e196c1..bdd446d7091 100644
--- a/homeassistant/components/homematic/sensor.py
+++ b/homeassistant/components/homematic/sensor.py
@@ -178,6 +178,7 @@ SENSOR_DESCRIPTIONS: dict[str, SensorEntityDescription] = {
         key="WIND_DIRECTION",
         native_unit_of_measurement=DEGREE,
         device_class=SensorDeviceClass.WIND_DIRECTION,
+        state_class=SensorStateClass.MEASUREMENT_ANGLE,
     ),
     "WIND_DIRECTION_RANGE": SensorEntityDescription(
         key="WIND_DIRECTION_RANGE",
diff --git a/homeassistant/components/homeworks/strings.json b/homeassistant/components/homeworks/strings.json
index 1a144615e89..3ec4945957b 100644
--- a/homeassistant/components/homeworks/strings.json
+++ b/homeassistant/components/homeworks/strings.json
@@ -57,7 +57,7 @@
   },
   "exceptions": {
     "invalid_controller_id": {
-      "message": "Invalid controller_id \"{controller_id}\", expected one of \"{controller_ids}\""
+      "message": "Invalid controller ID \"{controller_id}\", expected one of \"{controller_ids}\""
     }
   },
   "options": {
diff --git a/homeassistant/components/huawei_lte/config_flow.py b/homeassistant/components/huawei_lte/config_flow.py
index 96e160ece7b..4ca9e7531e3 100644
--- a/homeassistant/components/huawei_lte/config_flow.py
+++ b/homeassistant/components/huawei_lte/config_flow.py
@@ -178,8 +178,8 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
         except Timeout:
             _LOGGER.warning("Connection timeout", exc_info=True)
             errors[CONF_URL] = "connection_timeout"
-        except Exception:  # noqa: BLE001
-            _LOGGER.warning("Unknown error connecting to device", exc_info=True)
+        except Exception:
+            _LOGGER.exception("Unknown error connecting to device")
             errors[CONF_URL] = "unknown"
         return conn

@@ -188,8 +188,8 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
         try:
             conn.close()
conn.requests_session.close() - except Exception: # noqa: BLE001 - _LOGGER.debug("Disconnect error", exc_info=True) + except Exception: + _LOGGER.exception("Disconnect error") async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/hue/icons.json b/homeassistant/components/hue/icons.json index 31464308b0a..646c420f1fe 100644 --- a/homeassistant/components/hue/icons.json +++ b/homeassistant/components/hue/icons.json @@ -1,4 +1,28 @@ { + "entity": { + "light": { + "hue_light": { + "state_attributes": { + "effect": { + "state": { + "candle": "mdi:candle", + "sparkle": "mdi:shimmer", + "glisten": "mdi:creation", + "sunrise": "mdi:weather-sunset-up", + "sunset": "mdi:weather-sunset", + "fire": "mdi:fire", + "prism": "mdi:triangle-outline", + "opal": "mdi:diamond-stone", + "underwater": "mdi:waves", + "cosmos": "mdi:star-shooting", + "sunbeam": "mdi:spotlight-beam", + "enchant": "mdi:magic-staff" + } + } + } + } + } + }, "services": { "hue_activate_scene": { "service": "mdi:palette" diff --git a/homeassistant/components/hue/strings.json b/homeassistant/components/hue/strings.json index 2f7f2e55561..6d2e9054c6f 100644 --- a/homeassistant/components/hue/strings.json +++ b/homeassistant/components/hue/strings.json @@ -11,7 +11,7 @@ } }, "manual": { - "title": "Manual configure a Hue bridge", + "title": "Manually configure a Hue bridge", "data": { "host": "[%key:common::config_flow::data::host%]" }, @@ -46,8 +46,8 @@ "button_2": "Second button", "button_3": "Third button", "button_4": "Fourth button", - "double_buttons_1_3": "First and Third buttons", - "double_buttons_2_4": "Second and Fourth buttons", + "double_buttons_1_3": "First and third button", + "double_buttons_2_4": "Second and fourth button", "dim_down": "Dim down", "dim_up": "Dim up", "turn_off": "[%key:common::action::turn_off%]", @@ -102,6 +102,28 @@ } } }, + "light": { + "hue_light": { + "state_attributes": { + "effect": { + "state": { + "candle": "Candle", + "sparkle": "Sparkle", + "glisten": "Glisten", + "sunrise": "Sunrise", + "sunset": "Sunset", + "fire": "Fire", + "prism": "Prism", + "opal": "Opal", + "underwater": "Underwater", + "cosmos": "Cosmos", + "sunbeam": "Sunbeam", + "enchant": "Enchant" + } + } + } + } + }, "sensor": { "zigbee_connectivity": { "name": "Zigbee connectivity", diff --git a/homeassistant/components/hue/v2/light.py b/homeassistant/components/hue/v2/light.py index 4b00299bc9d..757b69c7b7b 100644 --- a/homeassistant/components/hue/v2/light.py +++ b/homeassistant/components/hue/v2/light.py @@ -18,6 +18,7 @@ from homeassistant.components.light import ( ATTR_FLASH, ATTR_TRANSITION, ATTR_XY_COLOR, + EFFECT_OFF, FLASH_SHORT, ColorMode, LightEntity, @@ -39,7 +40,6 @@ from .helpers import ( normalize_hue_transition, ) -EFFECT_NONE = "None" FALLBACK_MIN_KELVIN = 6500 FALLBACK_MAX_KELVIN = 2000 FALLBACK_KELVIN = 5800 # halfway @@ -75,7 +75,7 @@ class HueLight(HueBaseEntity, LightEntity): _fixed_color_mode: ColorMode | None = None entity_description = LightEntityDescription( - key="hue_light", has_entity_name=True, name=None + key="hue_light", translation_key="hue_light", has_entity_name=True, name=None ) def __init__( @@ -118,7 +118,7 @@ class HueLight(HueBaseEntity, LightEntity): if x != TimedEffectStatus.NO_EFFECT ] if len(self._attr_effect_list) > 0: - self._attr_effect_list.insert(0, EFFECT_NONE) + self._attr_effect_list.insert(0, EFFECT_OFF) self._attr_supported_features |= LightEntityFeature.EFFECT @property @@ -211,7 +211,7 @@ class HueLight(HueBaseEntity, 
LightEntity): if timed_effects := self.resource.timed_effects: if timed_effects.status != TimedEffectStatus.NO_EFFECT: return timed_effects.status.value - return EFFECT_NONE + return EFFECT_OFF async def async_turn_on(self, **kwargs: Any) -> None: """Turn the device on.""" @@ -233,12 +233,12 @@ class HueLight(HueBaseEntity, LightEntity): self._color_temp_active = color_temp is not None flash = kwargs.get(ATTR_FLASH) effect = effect_str = kwargs.get(ATTR_EFFECT) - if effect_str in (EFFECT_NONE, EFFECT_NONE.lower()): - # ignore effect if set to "None" and we have no effect active - # the special effect "None" is only used to stop an active effect + if effect_str == EFFECT_OFF: + # ignore effect if set to "off" and we have no effect active + # the special effect "off" is only used to stop an active effect # but sending it while no effect is active can actually result in issues # https://github.com/home-assistant/core/issues/122165 - effect = None if self.effect == EFFECT_NONE else EffectStatus.NO_EFFECT + effect = None if self.effect == EFFECT_OFF else EffectStatus.NO_EFFECT elif effect_str is not None: # work out if we got a regular effect or timed effect effect = EffectStatus(effect_str) diff --git a/homeassistant/components/husqvarna_automower/config_flow.py b/homeassistant/components/husqvarna_automower/config_flow.py index 7efed529453..31ca5eef0cd 100644 --- a/homeassistant/components/husqvarna_automower/config_flow.py +++ b/homeassistant/components/husqvarna_automower/config_flow.py @@ -54,7 +54,8 @@ class HusqvarnaConfigFlowHandler( automower_api = AutomowerSession(AsyncConfigFlowAuth(websession, token), tz) try: status_data = await automower_api.get_status() - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") return self.async_abort(reason="unknown") if status_data == {}: return self.async_abort(reason="no_mower_connected") diff --git a/homeassistant/components/husqvarna_automower/sensor.py b/homeassistant/components/husqvarna_automower/sensor.py index 2e1d4041e5a..d7a83c82185 100644 --- a/homeassistant/components/husqvarna_automower/sensor.py +++ b/homeassistant/components/husqvarna_automower/sensor.py @@ -227,12 +227,16 @@ def _get_work_area_names(data: MowerAttributes) -> list[str]: @callback def _get_current_work_area_name(data: MowerAttributes) -> str: """Return the name of the current work area.""" - if data.mower.work_area_id is None: - return STATE_NO_WORK_AREA_ACTIVE if TYPE_CHECKING: # Sensor does not get created if values are None assert data.work_areas is not None - return data.work_areas[data.mower.work_area_id].name + if ( + data.mower.work_area_id is not None + and data.mower.work_area_id in data.work_areas + ): + return data.work_areas[data.mower.work_area_id].name + + return STATE_NO_WORK_AREA_ACTIVE @callback @@ -295,6 +299,18 @@ MOWER_SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] 
= ( exists_fn=lambda data: data.statistics.cutting_blade_usage_time is not None, value_fn=attrgetter("statistics.cutting_blade_usage_time"), ), + AutomowerSensorEntityDescription( + key="downtime", + translation_key="downtime", + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.DURATION, + entity_registry_enabled_default=False, + native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_display_precision=0, + suggested_unit_of_measurement=UnitOfTime.HOURS, + exists_fn=lambda data: data.statistics.downtime is not None, + value_fn=attrgetter("statistics.downtime"), + ), AutomowerSensorEntityDescription( key="total_charging_time", translation_key="total_charging_time", @@ -367,6 +383,18 @@ MOWER_SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] = ( exists_fn=lambda data: data.statistics.total_drive_distance is not None, value_fn=attrgetter("statistics.total_drive_distance"), ), + AutomowerSensorEntityDescription( + key="uptime", + translation_key="uptime", + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.DURATION, + entity_registry_enabled_default=False, + native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_display_precision=0, + suggested_unit_of_measurement=UnitOfTime.HOURS, + exists_fn=lambda data: data.statistics.uptime is not None, + value_fn=attrgetter("statistics.uptime"), + ), AutomowerSensorEntityDescription( key="next_start_timestamp", translation_key="next_start_timestamp", diff --git a/homeassistant/components/husqvarna_automower/strings.json b/homeassistant/components/husqvarna_automower/strings.json index 9bd0bb06b3e..35ce342867f 100644 --- a/homeassistant/components/husqvarna_automower/strings.json +++ b/homeassistant/components/husqvarna_automower/strings.json @@ -221,6 +221,9 @@ "cutting_blade_usage_time": { "name": "Cutting blade usage time" }, + "downtime": { + "name": "Downtime" + }, "restricted_reason": { "name": "Restricted reason", "state": { @@ -263,6 +266,9 @@ "demo": "Demo" } }, + "uptime": { + "name": "Uptime" + }, "work_area": { "name": "Work area", "state": { diff --git a/homeassistant/components/iaqualink/entity.py b/homeassistant/components/iaqualink/entity.py index 437611e5a5f..d0176ed8bfe 100644 --- a/homeassistant/components/iaqualink/entity.py +++ b/homeassistant/components/iaqualink/entity.py @@ -32,7 +32,6 @@ class AqualinkEntity(Entity): manufacturer=dev.manufacturer, model=dev.model, name=dev.label, - via_device=(DOMAIN, dev.system.serial), ) async def async_added_to_hass(self) -> None: diff --git a/homeassistant/components/iaqualink/manifest.json b/homeassistant/components/iaqualink/manifest.json index 2531632075c..7e05bd72f0b 100644 --- a/homeassistant/components/iaqualink/manifest.json +++ b/homeassistant/components/iaqualink/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/iaqualink", "iot_class": "cloud_polling", "loggers": ["iaqualink"], - "requirements": ["iaqualink==0.5.0", "h2==4.1.0"], + "requirements": ["iaqualink==0.5.3", "h2==4.1.0"], "single_config_entry": true } diff --git a/homeassistant/components/idasen_desk/strings.json b/homeassistant/components/idasen_desk/strings.json index 7486973638b..ff0cb5b8ae6 100644 --- a/homeassistant/components/idasen_desk/strings.json +++ b/homeassistant/components/idasen_desk/strings.json @@ -7,7 +7,7 @@ "address": "Device" }, "data_description": { - "address": "The bluetooth device for the desk." + "address": "The Bluetooth device for the desk." 
} } }, @@ -26,10 +26,10 @@ "entity": { "button": { "connect": { - "name": "Connect" + "name": "[%key:common::action::connect%]" }, "disconnect": { - "name": "Disconnect" + "name": "[%key:common::action::disconnect%]" } }, "sensor": { diff --git a/homeassistant/components/imgw_pib/config_flow.py b/homeassistant/components/imgw_pib/config_flow.py index 558528fcbef..805bfa2ccb3 100644 --- a/homeassistant/components/imgw_pib/config_flow.py +++ b/homeassistant/components/imgw_pib/config_flow.py @@ -50,7 +50,7 @@ class ImgwPibFlowHandler(ConfigFlow, domain=DOMAIN): hydrological_data = await imgwpib.get_hydrological_data() except (ClientError, TimeoutError, ApiError): errors["base"] = "cannot_connect" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/imgw_pib/manifest.json b/homeassistant/components/imgw_pib/manifest.json index 0ecc1b4b7d0..3d8b34055fd 100644 --- a/homeassistant/components/imgw_pib/manifest.json +++ b/homeassistant/components/imgw_pib/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/imgw_pib", "iot_class": "cloud_polling", - "requirements": ["imgw_pib==1.0.9"] + "requirements": ["imgw_pib==1.0.10"] } diff --git a/homeassistant/components/imgw_pib/sensor.py b/homeassistant/components/imgw_pib/sensor.py index 33b82bbb43b..7871006b2ae 100644 --- a/homeassistant/components/imgw_pib/sensor.py +++ b/homeassistant/components/imgw_pib/sensor.py @@ -24,7 +24,8 @@ from .const import DOMAIN from .coordinator import ImgwPibConfigEntry, ImgwPibDataUpdateCoordinator from .entity import ImgwPibEntity -PARALLEL_UPDATES = 1 +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/imgw_pib/strings.json b/homeassistant/components/imgw_pib/strings.json index 89be0661c6f..33cd3cb3917 100644 --- a/homeassistant/components/imgw_pib/strings.json +++ b/homeassistant/components/imgw_pib/strings.json @@ -4,6 +4,9 @@ "user": { "data": { "station_id": "Hydrological station" + }, + "data_description": { + "station_id": "Select a hydrological station from the list." 
} } }, diff --git a/homeassistant/components/incomfort/config_flow.py b/homeassistant/components/incomfort/config_flow.py index 875bc25bd2f..027c3ad4691 100644 --- a/homeassistant/components/incomfort/config_flow.py +++ b/homeassistant/components/incomfort/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from incomfortclient import InvalidGateway, InvalidHeaterList @@ -31,6 +32,7 @@ from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo from .const import CONF_LEGACY_SETPOINT_STATUS, DOMAIN from .coordinator import InComfortConfigEntry, async_connect_gateway +_LOGGER = logging.getLogger(__name__) TITLE = "Intergas InComfort/Intouch Lan2RF gateway" CONFIG_SCHEMA = vol.Schema( @@ -88,7 +90,8 @@ async def async_try_connect_gateway( return {"base": "no_heaters"} except TimeoutError: return {"base": "timeout_error"} - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") return {"base": "unknown"} return None diff --git a/homeassistant/components/incomfort/strings.json b/homeassistant/components/incomfort/strings.json index 73ba88078a8..31fec77f455 100644 --- a/homeassistant/components/incomfort/strings.json +++ b/homeassistant/components/incomfort/strings.json @@ -118,7 +118,7 @@ "tapwater_int": "Tap water internal", "sensor_test": "Sensor test", "central_heating": "Central heating", - "standby": "Stand-by", + "standby": "[%key:common::state::standby%]", "postrun_boyler": "Post run boiler", "service": "Service", "tapwater": "Tap water", diff --git a/homeassistant/components/iometer/__init__.py b/homeassistant/components/iometer/__init__.py index bbf046e70e9..feb7ce9b8cf 100644 --- a/homeassistant/components/iometer/__init__.py +++ b/homeassistant/components/iometer/__init__.py @@ -12,7 +12,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .coordinator import IOmeterConfigEntry, IOMeterCoordinator -PLATFORMS: list[Platform] = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: IOmeterConfigEntry) -> bool: diff --git a/homeassistant/components/iometer/binary_sensor.py b/homeassistant/components/iometer/binary_sensor.py new file mode 100644 index 00000000000..f443c4ae94a --- /dev/null +++ b/homeassistant/components/iometer/binary_sensor.py @@ -0,0 +1,87 @@ +"""IOmeter binary sensor.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .coordinator import IOMeterCoordinator, IOmeterData +from .entity import IOmeterEntity + + +@dataclass(frozen=True, kw_only=True) +class IOmeterBinarySensorDescription(BinarySensorEntityDescription): + """Describes Iometer binary sensor entity.""" + + value_fn: Callable[[IOmeterData], bool | None] + + +SENSOR_TYPES: list[IOmeterBinarySensorDescription] = [ + IOmeterBinarySensorDescription( + key="connection_status", + translation_key="connection_status", + device_class=BinarySensorDeviceClass.CONNECTIVITY, + entity_registry_enabled_default=False, + value_fn=lambda data: ( + data.status.device.core.connection_status == "connected" + if 
data.status.device.core.connection_status is not None + else None + ), + ), + IOmeterBinarySensorDescription( + key="attachment_status", + translation_key="attachment_status", + device_class=BinarySensorDeviceClass.CONNECTIVITY, + entity_registry_enabled_default=False, + value_fn=lambda data: ( + data.status.device.core.attachment_status == "attached" + if data.status.device.core.attachment_status is not None + else None + ), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the Sensors.""" + coordinator: IOMeterCoordinator = config_entry.runtime_data + + async_add_entities( + IOmeterBinarySensor( + coordinator=coordinator, + description=description, + ) + for description in SENSOR_TYPES + ) + + +class IOmeterBinarySensor(IOmeterEntity, BinarySensorEntity): + """Defines a IOmeter binary sensor.""" + + entity_description: IOmeterBinarySensorDescription + + def __init__( + self, + coordinator: IOMeterCoordinator, + description: IOmeterBinarySensorDescription, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = f"{coordinator.identifier}_{description.key}" + + @property + def is_on(self) -> bool | None: + """Return the binary sensor state.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/iometer/strings.json b/homeassistant/components/iometer/strings.json index 31deb16aa9c..b3878dd1b53 100644 --- a/homeassistant/components/iometer/strings.json +++ b/homeassistant/components/iometer/strings.json @@ -60,6 +60,14 @@ "wifi_rssi": { "name": "Signal strength Wi-Fi" } + }, + "binary_sensor": { + "connection_status": { + "name": "Core/Bridge connection status" + }, + "attachment_status": { + "name": "Core attachment status" + } } } } diff --git a/homeassistant/components/ipp/strings.json b/homeassistant/components/ipp/strings.json index ac879ef0ab3..b4c092c8ae3 100644 --- a/homeassistant/components/ipp/strings.json +++ b/homeassistant/components/ipp/strings.json @@ -38,7 +38,7 @@ "state": { "printing": "Printing", "idle": "[%key:common::state::idle%]", - "stopped": "Stopped" + "stopped": "[%key:common::state::stopped%]" } }, "uptime": { diff --git a/homeassistant/components/ista_ecotrend/sensor.py b/homeassistant/components/ista_ecotrend/sensor.py index ee54e502c26..0a8ed6e9ddb 100644 --- a/homeassistant/components/ista_ecotrend/sensor.py +++ b/homeassistant/components/ista_ecotrend/sensor.py @@ -8,6 +8,7 @@ import datetime from enum import StrEnum import logging +from homeassistant.components.recorder.models import StatisticMeanType from homeassistant.components.recorder.models.statistics import ( StatisticData, StatisticMetaData, @@ -270,7 +271,7 @@ class IstaSensor(CoordinatorEntity[IstaCoordinator], SensorEntity): ] metadata: StatisticMetaData = { - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": f"{self.device_entry.name} {self.name}", "source": DOMAIN, diff --git a/homeassistant/components/isy994/__init__.py b/homeassistant/components/isy994/__init__.py index 738c7e2d5ad..1e227b08206 100644 --- a/homeassistant/components/isy994/__init__.py +++ b/homeassistant/components/isy994/__init__.py @@ -138,7 +138,7 @@ async def async_setup_entry( for vtype, _, vid in isy.variables.children: numbers.append(isy.variables[vtype][vid]) if ( - isy.conf[CONFIG_NETWORKING] or isy.conf[CONFIG_PORTAL] + 
isy.conf[CONFIG_NETWORKING] or isy.conf.get(CONFIG_PORTAL) ) and isy.networking.nobjs: isy_data.devices[CONF_NETWORK] = _create_service_device_info( isy, name=CONFIG_NETWORKING, unique_id=CONF_NETWORK @@ -227,9 +227,9 @@ async def async_unload_entry( """Unload a config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - isy_data = hass.data[DOMAIN][entry.entry_id] + isy_data: IsyData = hass.data[DOMAIN][entry.entry_id] - isy: ISY = isy_data.root + isy = isy_data.root _LOGGER.debug("ISY Stopping Event Stream and automatic updates") isy.websocket.stop() diff --git a/homeassistant/components/isy994/entity.py b/homeassistant/components/isy994/entity.py index 1da727fdee8..d170854396c 100644 --- a/homeassistant/components/isy994/entity.py +++ b/homeassistant/components/isy994/entity.py @@ -181,6 +181,7 @@ class ISYProgramEntity(ISYEntity): _actions: Program _status: Program + _node: Program def __init__(self, name: str, status: Program, actions: Program = None) -> None: """Initialize the ISY program-based entity.""" diff --git a/homeassistant/components/isy994/manifest.json b/homeassistant/components/isy994/manifest.json index 3aa81027b4f..5cd3bb73a89 100644 --- a/homeassistant/components/isy994/manifest.json +++ b/homeassistant/components/isy994/manifest.json @@ -24,7 +24,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["pyisy"], - "requirements": ["pyisy==3.1.14"], + "requirements": ["pyisy==3.4.0"], "ssdp": [ { "manufacturer": "Universal Devices Inc.", diff --git a/homeassistant/components/isy994/services.py b/homeassistant/components/isy994/services.py index 6546aec6efa..24cfa9aefb1 100644 --- a/homeassistant/components/isy994/services.py +++ b/homeassistant/components/isy994/services.py @@ -21,6 +21,7 @@ from homeassistant.helpers.service import entity_service_call from homeassistant.helpers.typing import VolDictType from .const import _LOGGER, DOMAIN +from .models import IsyData # Common Services for All Platforms: SERVICE_SEND_PROGRAM_COMMAND = "send_program_command" @@ -149,7 +150,7 @@ def async_setup_services(hass: HomeAssistant) -> None: isy_name = service.data.get(CONF_ISY) for config_entry_id in hass.data[DOMAIN]: - isy_data = hass.data[DOMAIN][config_entry_id] + isy_data: IsyData = hass.data[DOMAIN][config_entry_id] isy = isy_data.root if isy_name and isy_name != isy.conf["name"]: continue diff --git a/homeassistant/components/isy994/switch.py b/homeassistant/components/isy994/switch.py index 946feddcd10..d5c8a23cbea 100644 --- a/homeassistant/components/isy994/switch.py +++ b/homeassistant/components/isy994/switch.py @@ -157,7 +157,7 @@ class ISYEnableSwitchEntity(ISYAuxControlEntity, SwitchEntity): device_info=device_info, ) self._attr_name = description.name # Override super - self._change_handler: EventListener = None + self._change_handler: EventListener | None = None # pylint: disable-next=hass-missing-super-call async def async_added_to_hass(self) -> None: diff --git a/homeassistant/components/jellyfin/client_wrapper.py b/homeassistant/components/jellyfin/client_wrapper.py index ab5d5e7d7f8..91fe0885e4c 100644 --- a/homeassistant/components/jellyfin/client_wrapper.py +++ b/homeassistant/components/jellyfin/client_wrapper.py @@ -97,16 +97,27 @@ def get_artwork_url( client: JellyfinClient, item: dict[str, Any], max_width: int = 600 ) -> str | None: """Find a suitable thumbnail for an item.""" - artwork_id: str = item["Id"] - artwork_type = "Primary" + artwork_id: str | None = None + artwork_type: str | None = None 
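# Descriptive note on the updated selection logic that follows: artwork is picked
# in priority order (album primary image, item backdrop, parent backdrop, item
# primary image), and None is returned when no suitable image is available.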
parent_backdrop_id: str | None = item.get("ParentBackdropItemId") - if "Backdrop" in item[ITEM_KEY_IMAGE_TAGS]: + if "AlbumPrimaryImageTag" in item: + # jellyfin_apiclient_python doesn't support passing a specific tag to `.artwork`, + # so we don't use the actual value of AlbumPrimaryImageTag. + # However, its mere presence tells us that the album does have primary artwork, + # and the resulting URL will pull the primary album art even if the tag is not specified. + artwork_type = "Primary" + artwork_id = item["AlbumId"] + elif "Backdrop" in item[ITEM_KEY_IMAGE_TAGS]: artwork_type = "Backdrop" + artwork_id = item["Id"] elif parent_backdrop_id: artwork_type = "Backdrop" artwork_id = parent_backdrop_id - elif "Primary" not in item[ITEM_KEY_IMAGE_TAGS]: + elif "Primary" in item[ITEM_KEY_IMAGE_TAGS]: + artwork_type = "Primary" + artwork_id = item["Id"] + else: return None return str(client.jellyfin.artwork(artwork_id, artwork_type, max_width)) diff --git a/homeassistant/components/jewish_calendar/__init__.py b/homeassistant/components/jewish_calendar/__init__.py index 9f7ec6ba976..47d60d74938 100644 --- a/homeassistant/components/jewish_calendar/__init__.py +++ b/homeassistant/components/jewish_calendar/__init__.py @@ -16,7 +16,8 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers.typing import ConfigType from .const import ( CONF_CANDLE_LIGHT_MINUTES, @@ -26,11 +27,21 @@ from .const import ( DEFAULT_DIASPORA, DEFAULT_HAVDALAH_OFFSET_MINUTES, DEFAULT_LANGUAGE, + DOMAIN, ) from .entity import JewishCalendarConfigEntry, JewishCalendarData +from .service import async_setup_services _LOGGER = logging.getLogger(__name__) PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the Jewish Calendar service.""" + async_setup_services(hass) + + return True async def async_setup_entry( @@ -113,8 +124,8 @@ async def async_migrate_entry( "first_stars": "tset_hakohavim_tsom", "three_stars": "tset_hakohavim_shabbat", } - new_keys = tuple(key_translations.values()) - if not entity_entry.unique_id.endswith(new_keys): + old_keys = tuple(key_translations.keys()) + if entity_entry.unique_id.endswith(old_keys): old_key = entity_entry.unique_id.split("-")[1] new_unique_id = f"{config_entry.entry_id}-{key_translations[old_key]}" return {"new_unique_id": new_unique_id} diff --git a/homeassistant/components/jewish_calendar/const.py b/homeassistant/components/jewish_calendar/const.py index 4af76a8927b..0d5455fcd86 100644 --- a/homeassistant/components/jewish_calendar/const.py +++ b/homeassistant/components/jewish_calendar/const.py @@ -2,6 +2,9 @@ DOMAIN = "jewish_calendar" +ATTR_DATE = "date" +ATTR_NUSACH = "nusach" + CONF_DIASPORA = "diaspora" CONF_CANDLE_LIGHT_MINUTES = "candle_lighting_minutes_before_sunset" CONF_HAVDALAH_OFFSET_MINUTES = "havdalah_minutes_after_sunset" @@ -11,3 +14,5 @@ DEFAULT_CANDLE_LIGHT = 18 DEFAULT_DIASPORA = False DEFAULT_HAVDALAH_OFFSET_MINUTES = 0 DEFAULT_LANGUAGE = "english" + +SERVICE_COUNT_OMER = "count_omer" diff --git a/homeassistant/components/jewish_calendar/icons.json b/homeassistant/components/jewish_calendar/icons.json new file mode 100644 index 00000000000..24b922df7a2 --- /dev/null +++ 
b/homeassistant/components/jewish_calendar/icons.json @@ -0,0 +1,7 @@ +{ + "services": { + "count_omer": { + "service": "mdi:counter" + } + } +} diff --git a/homeassistant/components/jewish_calendar/service.py b/homeassistant/components/jewish_calendar/service.py new file mode 100644 index 00000000000..7c3c7a21f1c --- /dev/null +++ b/homeassistant/components/jewish_calendar/service.py @@ -0,0 +1,63 @@ +"""Services for Jewish Calendar.""" + +import datetime +from typing import cast + +from hdate import HebrewDate +from hdate.omer import Nusach, Omer +from hdate.translator import Language +import voluptuous as vol + +from homeassistant.const import CONF_LANGUAGE +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.selector import LanguageSelector, LanguageSelectorConfig + +from .const import ATTR_DATE, ATTR_NUSACH, DOMAIN, SERVICE_COUNT_OMER + +SUPPORTED_LANGUAGES = {"en": "english", "fr": "french", "he": "hebrew"} +OMER_SCHEMA = vol.Schema( + { + vol.Required(ATTR_DATE, default=datetime.date.today): cv.date, + vol.Required(ATTR_NUSACH, default="sfarad"): vol.In( + [nusach.name.lower() for nusach in Nusach] + ), + vol.Required(CONF_LANGUAGE, default="he"): LanguageSelector( + LanguageSelectorConfig(languages=list(SUPPORTED_LANGUAGES.keys())) + ), + } +) + + +def async_setup_services(hass: HomeAssistant) -> None: + """Set up the Jewish Calendar services.""" + + async def get_omer_count(call: ServiceCall) -> ServiceResponse: + """Return the Omer blessing for a given date.""" + hebrew_date = HebrewDate.from_gdate(call.data["date"]) + nusach = Nusach[call.data["nusach"].upper()] + + # Currently Omer only supports Hebrew, English, and French and requires + # the full language name + language = cast(Language, SUPPORTED_LANGUAGES[call.data[CONF_LANGUAGE]]) + + omer = Omer(date=hebrew_date, nusach=nusach, language=language) + return { + "message": str(omer.count_str()), + "weeks": omer.week, + "days": omer.day, + "total_days": omer.total_days, + } + + hass.services.async_register( + DOMAIN, + SERVICE_COUNT_OMER, + get_omer_count, + schema=OMER_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) diff --git a/homeassistant/components/jewish_calendar/services.yaml b/homeassistant/components/jewish_calendar/services.yaml new file mode 100644 index 00000000000..894fa30fee3 --- /dev/null +++ b/homeassistant/components/jewish_calendar/services.yaml @@ -0,0 +1,29 @@ +count_omer: + fields: + date: + required: true + example: "2025-04-14" + selector: + date: + nusach: + required: true + example: "sfarad" + default: "sfarad" + selector: + select: + translation_key: "nusach" + options: + - "sfarad" + - "ashkenaz" + - "adot_mizrah" + - "italian" + language: + required: true + default: "he" + example: "he" + selector: + language: + languages: + - "en" + - "he" + - "fr" diff --git a/homeassistant/components/jewish_calendar/strings.json b/homeassistant/components/jewish_calendar/strings.json index 1b7b86c0056..933d77d2188 100644 --- a/homeassistant/components/jewish_calendar/strings.json +++ b/homeassistant/components/jewish_calendar/strings.json @@ -3,9 +3,9 @@ "sensor": { "hebrew_date": { "state_attributes": { - "hebrew_year": { "name": "Hebrew Year" }, - "hebrew_month_name": { "name": "Hebrew Month Name" }, - "hebrew_day": { "name": "Hebrew Day" } + "hebrew_year": { "name": "Hebrew year" }, + "hebrew_month_name": { "name": "Hebrew month name" }, + "hebrew_day": { 
"name": "Hebrew day" } } } } @@ -16,10 +16,10 @@ "data": { "name": "[%key:common::config_flow::data::name%]", "diaspora": "Outside of Israel?", - "language": "Language for Holidays and Dates", + "language": "Language for holidays and dates", "location": "[%key:common::config_flow::data::location%]", "elevation": "[%key:common::config_flow::data::elevation%]", - "time_zone": "Time Zone" + "time_zone": "Time zone" }, "data_description": { "time_zone": "If you specify a location, make sure to specify the time zone for correct calendar times calculations" @@ -36,7 +36,7 @@ "init": { "title": "Configure options for Jewish Calendar", "data": { - "candle_lighting_minutes_before_sunset": "Minutes before sunset for candle lighthing", + "candle_lighting_minutes_before_sunset": "Minutes before sunset for candle lighting", "havdalah_minutes_after_sunset": "Minutes after sunset for Havdalah" }, "data_description": { @@ -45,5 +45,35 @@ } } } + }, + "selector": { + "nusach": { + "options": { + "sfarad": "Sfarad", + "ashkenaz": "Ashkenaz", + "adot_mizrah": "Adot Mizrah", + "italian": "Italian" + } + } + }, + "services": { + "count_omer": { + "name": "Count the Omer", + "description": "Returns the phrase for counting the Omer on a given date.", + "fields": { + "date": { + "name": "Date", + "description": "Date to count the Omer for." + }, + "nusach": { + "name": "Nusach", + "description": "Nusach to count the Omer in." + }, + "language": { + "name": "[%key:common::config_flow::data::language%]", + "description": "Language to count the Omer in." + } + } + } } } diff --git a/homeassistant/components/kitchen_sink/__init__.py b/homeassistant/components/kitchen_sink/__init__.py index de8e521f0e8..2f876ca855d 100644 --- a/homeassistant/components/kitchen_sink/__init__.py +++ b/homeassistant/components/kitchen_sink/__init__.py @@ -12,14 +12,24 @@ from random import random import voluptuous as vol from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, get_instance -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, async_import_statistics, get_last_statistics, ) from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import Platform, UnitOfEnergy, UnitOfTemperature, UnitOfVolume +from homeassistant.const import ( + DEGREE, + Platform, + UnitOfEnergy, + UnitOfTemperature, + UnitOfVolume, +) from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue @@ -72,6 +82,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set the config entry up.""" + if "recorder" in hass.config.components: + # Insert stats for mean_type_changed issue + await _insert_wrong_wind_direction_statistics(hass) + # Set up demo platforms with config entry await hass.config_entries.async_forward_entry_setups( entry, COMPONENTS_WITH_DEMO_PLATFORM @@ -233,7 +247,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Outdoor temperature", "statistic_id": f"{DOMAIN}:temperature_outdoor", "unit_of_measurement": UnitOfTemperature.CELSIUS, - "has_mean": True, + "mean_type": 
StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) @@ -246,7 +260,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Energy consumption 1", "statistic_id": f"{DOMAIN}:energy_consumption_kwh", "unit_of_measurement": UnitOfEnergy.KILO_WATT_HOUR, - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, } await _insert_sum_statistics(hass, metadata, yesterday_midnight, today_midnight, 1) @@ -258,7 +272,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Energy consumption 2", "statistic_id": f"{DOMAIN}:energy_consumption_mwh", "unit_of_measurement": UnitOfEnergy.MEGA_WATT_HOUR, - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, } await _insert_sum_statistics( @@ -272,7 +286,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Gas consumption 1", "statistic_id": f"{DOMAIN}:gas_consumption_m3", "unit_of_measurement": UnitOfVolume.CUBIC_METERS, - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, } await _insert_sum_statistics( @@ -286,7 +300,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Gas consumption 2", "statistic_id": f"{DOMAIN}:gas_consumption_ft3", "unit_of_measurement": UnitOfVolume.CUBIC_FEET, - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, } await _insert_sum_statistics(hass, metadata, yesterday_midnight, today_midnight, 15) @@ -298,7 +312,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": None, "statistic_id": "sensor.statistics_issues_issue_1", "unit_of_measurement": UnitOfVolume.CUBIC_METERS, - "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) @@ -310,7 +324,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": None, "statistic_id": "sensor.statistics_issues_issue_2", "unit_of_measurement": "cats", - "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) @@ -322,7 +336,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": None, "statistic_id": "sensor.statistics_issues_issue_3", "unit_of_measurement": UnitOfVolume.CUBIC_METERS, - "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) @@ -334,8 +348,28 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": None, "statistic_id": "sensor.statistics_issues_issue_4", "unit_of_measurement": UnitOfVolume.CUBIC_METERS, - "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) async_import_statistics(hass, metadata, statistics) + + +async def _insert_wrong_wind_direction_statistics(hass: HomeAssistant) -> None: + """Insert some fake wind direction statistics.""" + now = dt_util.now() + yesterday = now - datetime.timedelta(days=1) + yesterday_midnight = yesterday.replace(hour=0, minute=0, second=0, microsecond=0) + today_midnight = yesterday_midnight + datetime.timedelta(days=1) + + # Add some statistics required to raise the mean_type_changed issue later + metadata: StatisticMetaData = { + "source": RECORDER_DOMAIN, + "name": None, + "statistic_id": 
"sensor.statistics_issues_issue_5", + "unit_of_measurement": DEGREE, + "mean_type": StatisticMeanType.ARITHMETIC, + "has_sum": False, + } + statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 0, 360) + async_import_statistics(hass, metadata, statistics) diff --git a/homeassistant/components/kitchen_sink/sensor.py b/homeassistant/components/kitchen_sink/sensor.py index 19d1b31aeab..04cb833f0df 100644 --- a/homeassistant/components/kitchen_sink/sensor.py +++ b/homeassistant/components/kitchen_sink/sensor.py @@ -8,7 +8,7 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfPower +from homeassistant.const import DEGREE, UnitOfPower from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback @@ -87,6 +87,16 @@ async def async_setup_entry( state_class=None, unit_of_measurement=UnitOfPower.WATT, ), + DemoSensor( + device_unique_id="statistics_issues", + unique_id="statistics_issue_5", + device_name="Statistics issues", + entity_name="Issue 5", + state=100, + device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + unit_of_measurement=DEGREE, + ), ] ) diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index 10730d87ed1..b13667a65b0 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -316,10 +316,10 @@ "name": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::name%]", "state": { "auto": "Auto", + "building_protection": "Building protection", "comfort": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::comfort%]", - "standby": "Standby", "economy": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::eco%]", - "building_protection": "Building protection" + "standby": "[%key:common::state::standby%]" } } } diff --git a/homeassistant/components/konnected/strings.json b/homeassistant/components/konnected/strings.json index e1a6863a199..df92e014f12 100644 --- a/homeassistant/components/konnected/strings.json +++ b/homeassistant/components/konnected/strings.json @@ -2,19 +2,19 @@ "config": { "step": { "import_confirm": { - "title": "Import Konnected Device", - "description": "A Konnected Alarm Panel with ID {id} has been discovered in configuration.yaml. This flow will allow you to import it into a config entry." + "title": "Import Konnected device", + "description": "A Konnected alarm panel with ID {id} has been discovered in configuration.yaml. This flow will allow you to import it into a config entry." }, "user": { - "description": "Please enter the host information for your Konnected Panel.", + "description": "Please enter the host information for your Konnected panel.", "data": { "host": "[%key:common::config_flow::data::ip%]", "port": "[%key:common::config_flow::data::port%]" } }, "confirm": { - "title": "Konnected Device Ready", - "description": "Model: {model}\nID: {id}\nHost: {host}\nPort: {port}\n\nYou can configure the IO and panel behavior in the Konnected Alarm Panel settings." + "title": "Konnected device ready", + "description": "Model: {model}\nID: {id}\nHost: {host}\nPort: {port}\n\nYou can configure the IO and panel behavior in the Konnected alarm panel settings." 
} }, "error": { @@ -45,8 +45,8 @@ } }, "options_io_ext": { - "title": "Configure Extended I/O", - "description": "Select the configuration of the remaining I/O below. You'll be able to configure detailed options in the next steps.", + "title": "Configure extended I/O", + "description": "Select the configuration of the remaining I/O below. You'll be able to configure detailed options in the next steps.", "data": { "8": "Zone 8", "9": "Zone 9", @@ -59,25 +59,25 @@ } }, "options_binary": { - "title": "Configure Binary Sensor", + "title": "Configure binary sensor", "description": "{zone} options", "data": { - "type": "Binary Sensor Type", + "type": "Binary sensor type", "name": "[%key:common::config_flow::data::name%]", "inverse": "Invert the open/close state" } }, "options_digital": { - "title": "Configure Digital Sensor", + "title": "Configure digital sensor", "description": "[%key:component::konnected::options::step::options_binary::description%]", "data": { - "type": "Sensor Type", + "type": "Sensor type", "name": "[%key:common::config_flow::data::name%]", - "poll_interval": "Poll Interval (minutes)" + "poll_interval": "Poll interval (minutes)" } }, "options_switch": { - "title": "Configure Switchable Output", + "title": "Configure switchable output", "description": "{zone} options: state {state}", "data": { "name": "[%key:common::config_flow::data::name%]", @@ -89,18 +89,18 @@ } }, "options_misc": { - "title": "Configure Misc", + "title": "Configure misc", "description": "Please select the desired behavior for your panel", "data": { "discovery": "Respond to discovery requests on your network", "blink": "Blink panel LED on when sending state change", - "override_api_host": "Override default Home Assistant API host panel URL", - "api_host": "Override API host URL" + "override_api_host": "Override default Home Assistant API host URL", + "api_host": "Custom API host URL" } } }, "error": { - "bad_host": "Invalid Override API host URL" + "bad_host": "Invalid custom API host URL" }, "abort": { "not_konn_panel": "[%key:component::konnected::config::abort::not_konn_panel%]" diff --git a/homeassistant/components/lacrosse_view/sensor.py b/homeassistant/components/lacrosse_view/sensor.py index 667fcbb8dcc..dde8dfd54a2 100644 --- a/homeassistant/components/lacrosse_view/sensor.py +++ b/homeassistant/components/lacrosse_view/sensor.py @@ -106,6 +106,7 @@ SENSOR_DESCRIPTIONS = { native_unit_of_measurement=DEGREE, suggested_display_precision=2, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), "WetDry": LaCrosseSensorEntityDescription( key="WetDry", diff --git a/homeassistant/components/lamarzocco/strings.json b/homeassistant/components/lamarzocco/strings.json index 04853b8d0ca..f087856dbed 100644 --- a/homeassistant/components/lamarzocco/strings.json +++ b/homeassistant/components/lamarzocco/strings.json @@ -146,7 +146,7 @@ "prebrew_infusion_select": { "name": "Prebrew/-infusion mode", "state": { - "disabled": "Disabled", + "disabled": "[%key:common::state::disabled%]", "prebrew": "Prebrew", "prebrew_enabled": "Prebrew", "preinfusion": "Preinfusion" diff --git a/homeassistant/components/lastfm/config_flow.py b/homeassistant/components/lastfm/config_flow.py index 0e1f680dd63..ca40aebd0d4 100644 --- a/homeassistant/components/lastfm/config_flow.py +++ b/homeassistant/components/lastfm/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any from pylast import LastFMNetwork, PyLastError, User, WSError @@ -32,6 
+33,8 @@ CONFIG_SCHEMA: vol.Schema = vol.Schema( } ) +_LOGGER = logging.getLogger(__name__) + def get_lastfm_user(api_key: str, username: str) -> tuple[User, dict[str, str]]: """Get and validate lastFM User.""" @@ -49,7 +52,8 @@ def get_lastfm_user(api_key: str, username: str) -> tuple[User, dict[str, str]]: errors["base"] = "invalid_auth" else: errors["base"] = "unknown" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" return user, errors diff --git a/homeassistant/components/led_ble/light.py b/homeassistant/components/led_ble/light.py index 14f2f228e13..2facda734d5 100644 --- a/homeassistant/components/led_ble/light.py +++ b/homeassistant/components/led_ble/light.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any +from typing import Any, cast from led_ble import LEDBLE @@ -83,7 +83,7 @@ class LEDBLEEntity(CoordinatorEntity[DataUpdateCoordinator[None]], LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" - brightness = kwargs.get(ATTR_BRIGHTNESS, self.brightness) + brightness = cast(int, kwargs.get(ATTR_BRIGHTNESS, self.brightness)) if effect := kwargs.get(ATTR_EFFECT): await self._async_set_effect(effect, brightness) return diff --git a/homeassistant/components/lg_thinq/number.py b/homeassistant/components/lg_thinq/number.py index 7003519e0ce..ac8991d6bb5 100644 --- a/homeassistant/components/lg_thinq/number.py +++ b/homeassistant/components/lg_thinq/number.py @@ -123,6 +123,9 @@ DEVICE_TYPE_NUMBER_MAP: dict[DeviceType, tuple[NumberEntityDescription, ...]] = NUMBER_DESC[ThinQProperty.LIGHT_STATUS], NUMBER_DESC[ThinQProperty.TARGET_TEMPERATURE], ), + DeviceType.VENTILATOR: ( + TIMER_NUMBER_DESC[ThinQProperty.SLEEP_TIMER_RELATIVE_HOUR_TO_STOP], + ), } _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/lg_thinq/strings.json b/homeassistant/components/lg_thinq/strings.json index e1d3779f44b..09e3718af9b 100644 --- a/homeassistant/components/lg_thinq/strings.json +++ b/homeassistant/components/lg_thinq/strings.json @@ -19,7 +19,7 @@ "description": "Please enter a ThinQ [PAT(Personal Access Token)]({pat_url}) created with your LG ThinQ account.", "data": { "access_token": "Personal Access Token", - "country": "Country" + "country": "[%key:common::config_flow::data::country%]" } } } diff --git a/homeassistant/components/light/__init__.py b/homeassistant/components/light/__init__.py index 637ba45c7d9..7b548533058 100644 --- a/homeassistant/components/light/__init__.py +++ b/homeassistant/components/light/__init__.py @@ -465,7 +465,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: ): params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value) color_temp = params.pop(ATTR_COLOR_TEMP_KELVIN) - brightness = params.get(ATTR_BRIGHTNESS, light.brightness) + brightness = cast(int, params.get(ATTR_BRIGHTNESS, light.brightness)) params[ATTR_RGBWW_COLOR] = color_util.color_temperature_to_rgbww( color_temp, brightness, diff --git a/homeassistant/components/light/icons.json b/homeassistant/components/light/icons.json index df98def090e..6218c733f4c 100644 --- a/homeassistant/components/light/icons.json +++ b/homeassistant/components/light/icons.json @@ -1,7 +1,15 @@ { "entity_component": { "_": { - "default": "mdi:lightbulb" + "default": "mdi:lightbulb", + "state_attributes": { + "effect": { + "default": "mdi:circle-medium", + "state": { + "off": "mdi:star-off" + } + } + } } }, "services": { diff --git 
a/homeassistant/components/light/strings.json b/homeassistant/components/light/strings.json index c0f658c3a44..4a3b98ded46 100644 --- a/homeassistant/components/light/strings.json +++ b/homeassistant/components/light/strings.json @@ -93,7 +93,10 @@ "name": "Color temperature (Kelvin)" }, "effect": { - "name": "Effect" + "name": "Effect", + "state": { + "off": "[%key:common::state::off%]" + } }, "effect_list": { "name": "Available effects" diff --git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index 0941f2fbe61..02acd0f04f4 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["linkplay"], - "requirements": ["python-linkplay==0.2.1"], + "requirements": ["python-linkplay==0.2.2"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/homeassistant/components/local_calendar/manifest.json b/homeassistant/components/local_calendar/manifest.json index fc6d0bc00c7..528552aaa57 100644 --- a/homeassistant/components/local_calendar/manifest.json +++ b/homeassistant/components/local_calendar/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/local_calendar", "iot_class": "local_polling", "loggers": ["ical"], - "requirements": ["ical==9.0.1"] + "requirements": ["ical==9.0.3"] } diff --git a/homeassistant/components/local_todo/manifest.json b/homeassistant/components/local_todo/manifest.json index 27d3ccce4a7..6f117131c20 100644 --- a/homeassistant/components/local_todo/manifest.json +++ b/homeassistant/components/local_todo/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/local_todo", "iot_class": "local_polling", - "requirements": ["ical==9.0.1"] + "requirements": ["ical==9.0.3"] } diff --git a/homeassistant/components/lock/strings.json b/homeassistant/components/lock/strings.json index fd8636acf97..fd2854b7932 100644 --- a/homeassistant/components/lock/strings.json +++ b/homeassistant/components/lock/strings.json @@ -28,7 +28,7 @@ "locked": "[%key:common::state::locked%]", "locking": "Locking", "open": "[%key:common::state::open%]", - "opening": "Opening", + "opening": "[%key:common::state::opening%]", "unlocked": "[%key:common::state::unlocked%]", "unlocking": "Unlocking" }, diff --git a/homeassistant/components/matter/event.py b/homeassistant/components/matter/event.py index 6fa775fd1b9..fa7d96ed1ae 100644 --- a/homeassistant/components/matter/event.py +++ b/homeassistant/components/matter/event.py @@ -69,7 +69,7 @@ class MatterEventEntity(MatterEntity, EventEntity): max_presses_supported = self.get_matter_attribute_value( clusters.Switch.Attributes.MultiPressMax ) - max_presses_supported = min(max_presses_supported or 1, 8) + max_presses_supported = min(max_presses_supported or 2, 8) for i in range(max_presses_supported): event_types.append(f"multi_press_{i + 1}") # noqa: PERF401 elif feature_map & SwitchFeature.kMomentarySwitch: diff --git a/homeassistant/components/matter/number.py b/homeassistant/components/matter/number.py index 44538f46856..2c7a9651c60 100644 --- a/homeassistant/components/matter/number.py +++ b/homeassistant/components/matter/number.py @@ -169,8 +169,8 @@ DISCOVERY_SCHEMAS = [ device_class=NumberDeviceClass.TEMPERATURE, entity_category=EntityCategory.CONFIG, translation_key="temperature_offset", - native_max_value=25, - native_min_value=-25, + native_max_value=50, + 
native_min_value=-50, native_step=0.5, native_unit_of_measurement=UnitOfTemperature.CELSIUS, measurement_to_ha=lambda x: None if x is None else x / 10, diff --git a/homeassistant/components/matter/strings.json b/homeassistant/components/matter/strings.json index 1404d0a9076..c34666c03bb 100644 --- a/homeassistant/components/matter/strings.json +++ b/homeassistant/components/matter/strings.json @@ -160,7 +160,7 @@ "name": "On/Off transition time" }, "altitude": { - "name": "Altitude above Sea Level" + "name": "Altitude above sea level" }, "temperature_offset": { "name": "Temperature offset" @@ -258,7 +258,7 @@ "operational_state": { "name": "Operational state", "state": { - "stopped": "Stopped", + "stopped": "[%key:common::state::stopped%]", "running": "Running", "paused": "[%key:common::state::paused%]", "error": "Error", diff --git a/homeassistant/components/meater/config_flow.py b/homeassistant/components/meater/config_flow.py index a7ba3ba1498..5c11b10755c 100644 --- a/homeassistant/components/meater/config_flow.py +++ b/homeassistant/components/meater/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from meater import AuthenticationError, MeaterApi, ServiceUnavailableError @@ -14,6 +15,8 @@ from homeassistant.helpers import aiohttp_client from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + REAUTH_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) USER_SCHEMA = vol.Schema( {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} @@ -84,7 +87,8 @@ class MeaterConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_auth" except ServiceUnavailableError: errors["base"] = "service_unavailable_error" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown_auth_error" else: data = {"username": username, "password": password} diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index 575c0fa878d..e049a827c75 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp[default]==2025.02.19"], + "requirements": ["yt-dlp[default]==2025.03.26"], "single_config_entry": true } diff --git a/homeassistant/components/media_player/browse_media.py b/homeassistant/components/media_player/browse_media.py index c917164a2ee..d234050c1b2 100644 --- a/homeassistant/components/media_player/browse_media.py +++ b/homeassistant/components/media_player/browse_media.py @@ -23,7 +23,11 @@ from homeassistant.helpers.network import ( from .const import CONTENT_AUTH_EXPIRY_TIME, MediaClass, MediaType # Paths that we don't need to sign -PATHS_WITHOUT_AUTH = ("/api/tts_proxy/", "/api/esphome/ffmpeg_proxy/") +PATHS_WITHOUT_AUTH = ( + "/api/tts_proxy/", + "/api/esphome/ffmpeg_proxy/", + "/api/assist_satellite/static/", +) @callback diff --git a/homeassistant/components/media_player/strings.json b/homeassistant/components/media_player/strings.json index 87b5ec692af..03106b431d7 100644 --- a/homeassistant/components/media_player/strings.json +++ b/homeassistant/components/media_player/strings.json @@ -344,7 +344,7 @@ }, "repeat": { "options": { - "off": "Off", + "off": "[%key:common::state::off%]", "all": "Repeat all", "one": "Repeat one" } diff --git 
a/homeassistant/components/melcloud/climate.py b/homeassistant/components/melcloud/climate.py index 9c2ee60b12c..682a28ea080 100644 --- a/homeassistant/components/melcloud/climate.py +++ b/homeassistant/components/melcloud/climate.py @@ -3,7 +3,7 @@ from __future__ import annotations from datetime import timedelta -from typing import Any +from typing import Any, cast from pymelcloud import DEVICE_TYPE_ATA, DEVICE_TYPE_ATW, AtaDevice, AtwDevice import pymelcloud.ata_device as ata @@ -236,7 +236,7 @@ class AtaDeviceClimate(MelCloudClimate): set_dict: dict[str, Any] = {} if ATTR_HVAC_MODE in kwargs: self._apply_set_hvac_mode( - kwargs.get(ATTR_HVAC_MODE, self.hvac_mode), set_dict + cast(HVACMode, kwargs.get(ATTR_HVAC_MODE, self.hvac_mode)), set_dict ) if ATTR_TEMPERATURE in kwargs: diff --git a/homeassistant/components/meteo_france/__init__.py b/homeassistant/components/meteo_france/__init__.py index 5c4ada6b5f1..5f1d5269538 100644 --- a/homeassistant/components/meteo_france/__init__.py +++ b/homeassistant/components/meteo_france/__init__.py @@ -57,7 +57,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Fetch data from API endpoint.""" assert isinstance(department, str) return await hass.async_add_executor_job( - client.get_warning_current_phenomenoms, department, 0, True + client.get_warning_current_phenomenons, department, 0, True ) coordinator_forecast = DataUpdateCoordinator( diff --git a/homeassistant/components/meteo_france/manifest.json b/homeassistant/components/meteo_france/manifest.json index 567788ec479..d82d0c3f91b 100644 --- a/homeassistant/components/meteo_france/manifest.json +++ b/homeassistant/components/meteo_france/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/meteo_france", "iot_class": "cloud_polling", "loggers": ["meteofrance_api"], - "requirements": ["meteofrance-api==1.3.0"] + "requirements": ["meteofrance-api==1.4.0"] } diff --git a/homeassistant/components/meteo_france/sensor.py b/homeassistant/components/meteo_france/sensor.py index c29cc1ceda9..7333f7b0c19 100644 --- a/homeassistant/components/meteo_france/sensor.py +++ b/homeassistant/components/meteo_france/sensor.py @@ -7,7 +7,7 @@ from typing import Any from meteofrance_api.helpers import ( get_warning_text_status_from_indice_color, - readeable_phenomenoms_dict, + readable_phenomenons_dict, ) from meteofrance_api.model.forecast import Forecast from meteofrance_api.model.rain import Rain @@ -336,7 +336,7 @@ class MeteoFranceAlertSensor(MeteoFranceSensor[CurrentPhenomenons]): def extra_state_attributes(self): """Return the state attributes.""" return { - **readeable_phenomenoms_dict(self.coordinator.data.phenomenons_max_colors), + **readable_phenomenons_dict(self.coordinator.data.phenomenons_max_colors), } diff --git a/homeassistant/components/meteoclimatic/sensor.py b/homeassistant/components/meteoclimatic/sensor.py index 169da7a0a18..6e508bd63d8 100644 --- a/homeassistant/components/meteoclimatic/sensor.py +++ b/homeassistant/components/meteoclimatic/sensor.py @@ -102,6 +102,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( native_unit_of_measurement=DEGREE, icon="mdi:weather-windy", device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), SensorEntityDescription( key="rain", diff --git a/homeassistant/components/mopeka/strings.json b/homeassistant/components/mopeka/strings.json index 2455eea2f76..23feb554772 100644 --- a/homeassistant/components/mopeka/strings.json +++ b/homeassistant/components/mopeka/strings.json @@ -6,7 +6,7 @@ "description": "[%key:component::bluetooth::config::step::user::description%]", "data": { "address": "[%key:common::config_flow::data::device%]", - "medium_type": "Medium Type" + "medium_type": "Medium type" } }, "bluetooth_confirm": { diff --git a/homeassistant/components/motion_blinds/config_flow.py b/homeassistant/components/motion_blinds/config_flow.py index a7bb34af1e6..954f9e25c21 100644 --- a/homeassistant/components/motion_blinds/config_flow.py +++ b/homeassistant/components/motion_blinds/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any from motionblinds import MotionDiscovery, MotionGateway @@ -28,6 +29,8 @@ from .const import ( ) from .gateway import ConnectMotionGateway +_LOGGER = logging.getLogger(__name__) + CONFIG_SCHEMA = vol.Schema( { vol.Optional(CONF_HOST): str, @@ -93,7 +96,8 @@ class MotionBlindsFlowHandler(ConfigFlow, domain=DOMAIN): try: # key not needed for GetDeviceList request await self.hass.async_add_executor_job(gateway.GetDeviceList) - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Failed to connect to Motion Gateway") return self.async_abort(reason="not_motionblinds") if not gateway.available: diff --git a/homeassistant/components/motion_blinds/strings.json b/homeassistant/components/motion_blinds/strings.json index ddbf928462a..12060cd69f0 100644 --- a/homeassistant/components/motion_blinds/strings.json +++ b/homeassistant/components/motion_blinds/strings.json @@ -3,20 +3,20 @@ "flow_title": "{short_mac} ({ip_address})", "step": { "user": { - "description": "Connect to your Motion Gateway, if the IP address is not set, auto-discovery is used", + "description": "Connect to your Motionblinds gateway. 
If the IP address is not set, auto-discovery is used", "data": { "host": "[%key:common::config_flow::data::ip%]" } }, "connect": { - "description": "You will need the 16 character API Key, see https://www.home-assistant.io/integrations/motion_blinds/#retrieving-the-key for instructions", + "description": "You will need the 16-character API key; see https://www.home-assistant.io/integrations/motion_blinds/#retrieving-the-api-key for instructions", "data": { "api_key": "[%key:common::config_flow::data::api_key%]" } }, "select": { - "title": "Select the Motion Gateway that you wish to connect", - "description": "Run the setup again if you want to connect additional Motion Gateways", + "title": "Select the Motionblinds gateway that you wish to connect", + "description": "Run the setup again if you want to connect additional Motionblinds gateways", "data": { "select_ip": "[%key:common::config_flow::data::ip%]" } @@ -29,7 +29,7 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "connection_error": "[%key:common::config_flow::error::cannot_connect%]", - "not_motionblinds": "Discovered device is not a Motion gateway" + "not_motionblinds": "Discovered device is not a Motionblinds gateway" } }, "options": { diff --git a/homeassistant/components/motionblinds_ble/strings.json b/homeassistant/components/motionblinds_ble/strings.json index d6532f12386..ec1fb080854 100644 --- a/homeassistant/components/motionblinds_ble/strings.json +++ b/homeassistant/components/motionblinds_ble/strings.json @@ -1,8 +1,8 @@ { "config": { "abort": { - "no_bluetooth_adapter": "No bluetooth adapter found", - "no_devices_found": "Could not find any bluetooth devices" + "no_bluetooth_adapter": "No Bluetooth adapter found", + "no_devices_found": "Could not find any Bluetooth devices" }, "error": { "could_not_find_motor": "Could not find a motor with that MAC code", diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index cc98315c218..83592c4c23d 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -27,6 +27,13 @@ import voluptuous as vol from homeassistant.components.file_upload import process_uploaded_file from homeassistant.components.hassio import AddonError, AddonManager, AddonState +from homeassistant.components.sensor import ( + CONF_STATE_CLASS, + DEVICE_CLASS_UNITS, + SensorDeviceClass, + SensorStateClass, +) +from homeassistant.components.switch import SwitchDeviceClass from homeassistant.config_entries import ( SOURCE_RECONFIGURE, ConfigEntry, @@ -45,18 +52,22 @@ from homeassistant.const import ( ATTR_SW_VERSION, CONF_CLIENT_ID, CONF_DEVICE, + CONF_DEVICE_CLASS, CONF_DISCOVERY, CONF_HOST, CONF_NAME, + CONF_OPTIMISTIC, CONF_PASSWORD, CONF_PAYLOAD, CONF_PLATFORM, CONF_PORT, CONF_PROTOCOL, + CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME, + CONF_VALUE_TEMPLATE, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.data_entry_flow import AbortFlow +from homeassistant.data_entry_flow import AbortFlow, SectionConfig, section from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.json import json_dumps @@ -99,11 +110,16 @@ from .const import ( CONF_COMMAND_TOPIC, CONF_DISCOVERY_PREFIX, CONF_ENTITY_PICTURE, + CONF_EXPIRE_AFTER, CONF_KEEPALIVE, + CONF_LAST_RESET_VALUE_TEMPLATE, 
+ CONF_OPTIONS, CONF_PAYLOAD_AVAILABLE, CONF_PAYLOAD_NOT_AVAILABLE, CONF_QOS, CONF_RETAIN, + CONF_STATE_TOPIC, + CONF_SUGGESTED_DISPLAY_PRECISION, CONF_TLS_INSECURE, CONF_TRANSPORT, CONF_WILL_MESSAGE, @@ -120,6 +136,7 @@ from .const import ( DEFAULT_PORT, DEFAULT_PREFIX, DEFAULT_PROTOCOL, + DEFAULT_QOS, DEFAULT_TRANSPORT, DEFAULT_WILL, DEFAULT_WS_PATH, @@ -133,9 +150,9 @@ from .models import MqttAvailabilityData, MqttDeviceData, MqttSubentryData from .util import ( async_create_certificate_temp_files, get_file_path, + learn_more_url, valid_birth_will, valid_publish_topic, - valid_qos_schema, valid_subscribe_topic, valid_subscribe_topic_template, ) @@ -164,7 +181,6 @@ PASSWORD_SELECTOR = TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWO QOS_SELECTOR = NumberSelector( NumberSelectorConfig(mode=NumberSelectorMode.BOX, min=0, max=2) ) -QOS_DATA_SCHEMA = vol.All(QOS_SELECTOR, valid_qos_schema) KEEPALIVE_SELECTOR = vol.All( NumberSelector( NumberSelectorConfig( @@ -217,7 +233,7 @@ KEY_UPLOAD_SELECTOR = FileSelector( ) # Subentry selectors -SUBENTRY_PLATFORMS = [Platform.NOTIFY] +SUBENTRY_PLATFORMS = [Platform.NOTIFY, Platform.SENSOR, Platform.SWITCH] SUBENTRY_PLATFORM_SELECTOR = SelectSelector( SelectSelectorConfig( options=[platform.value for platform in SUBENTRY_PLATFORMS], @@ -225,7 +241,6 @@ SUBENTRY_PLATFORM_SELECTOR = SelectSelector( translation_key=CONF_PLATFORM, ) ) - TEMPLATE_SELECTOR = TemplateSelector(TemplateSelectorConfig()) SUBENTRY_AVAILABILITY_SCHEMA = vol.Schema( @@ -241,52 +256,280 @@ SUBENTRY_AVAILABILITY_SCHEMA = vol.Schema( } ) +# Sensor specific selectors +SENSOR_DEVICE_CLASS_SELECTOR = SelectSelector( + SelectSelectorConfig( + options=[device_class.value for device_class in SensorDeviceClass], + mode=SelectSelectorMode.DROPDOWN, + translation_key="device_class_sensor", + sort=True, + ) +) +SENSOR_STATE_CLASS_SELECTOR = SelectSelector( + SelectSelectorConfig( + options=[device_class.value for device_class in SensorStateClass], + mode=SelectSelectorMode.DROPDOWN, + translation_key=CONF_STATE_CLASS, + ) +) +OPTIONS_SELECTOR = SelectSelector( + SelectSelectorConfig( + options=[], + custom_value=True, + multiple=True, + ) +) +SUGGESTED_DISPLAY_PRECISION_SELECTOR = NumberSelector( + NumberSelectorConfig(mode=NumberSelectorMode.BOX, min=0, max=9) +) +EXPIRE_AFTER_SELECTOR = NumberSelector( + NumberSelectorConfig(mode=NumberSelectorMode.BOX, min=0) +) -@dataclass(frozen=True) +# Switch specific selectors +SWITCH_DEVICE_CLASS_SELECTOR = SelectSelector( + SelectSelectorConfig( + options=[device_class.value for device_class in SwitchDeviceClass], + mode=SelectSelectorMode.DROPDOWN, + translation_key="device_class_switch", + ) +) + + +@callback +def validate_sensor_platform_config( + config: dict[str, Any], +) -> dict[str, str]: + """Validate the sensor options, state and device class config.""" + errors: dict[str, str] = {} + # Only allow `options` to be set for `enum` sensors + # to limit the possible sensor values + if config.get(CONF_OPTIONS) is not None: + if config.get(CONF_STATE_CLASS) or config.get(CONF_UNIT_OF_MEASUREMENT): + errors[CONF_OPTIONS] = "options_not_allowed_with_state_class_or_uom" + + if (device_class := config.get(CONF_DEVICE_CLASS)) != SensorDeviceClass.ENUM: + errors[CONF_DEVICE_CLASS] = "options_device_class_enum" + + if ( + (device_class := config.get(CONF_DEVICE_CLASS)) == SensorDeviceClass.ENUM + and errors is not None + and CONF_OPTIONS not in config + ): + errors[CONF_OPTIONS] = "options_with_enum_device_class" + + if ( + device_class in 
DEVICE_CLASS_UNITS + and (unit_of_measurement := config.get(CONF_UNIT_OF_MEASUREMENT)) is None + and errors is not None + ): + # Do not allow an empty unit of measurement in a subentry data flow + errors[CONF_UNIT_OF_MEASUREMENT] = "uom_required_for_device_class" + return errors + + if ( + device_class is not None + and device_class in DEVICE_CLASS_UNITS + and unit_of_measurement not in DEVICE_CLASS_UNITS[device_class] + ): + errors[CONF_UNIT_OF_MEASUREMENT] = "invalid_uom" + + return errors + + +@dataclass(frozen=True, kw_only=True) class PlatformField: """Stores a platform config field schema, required flag and validator.""" - selector: Selector + selector: Selector[Any] | Callable[..., Selector[Any]] required: bool validator: Callable[..., Any] error: str | None = None default: str | int | vol.Undefined = vol.UNDEFINED exclude_from_reconfig: bool = False + conditions: tuple[dict[str, Any], ...] | None = None + custom_filtering: bool = False + section: str | None = None + + +@callback +def unit_of_measurement_selector(user_data: dict[str, Any | None]) -> Selector: + """Return a context based unit of measurement selector.""" + if ( + user_data is None + or (device_class := user_data.get(CONF_DEVICE_CLASS)) is None + or device_class not in DEVICE_CLASS_UNITS + ): + return TEXT_SELECTOR + return SelectSelector( + SelectSelectorConfig( + options=[str(uom) for uom in DEVICE_CLASS_UNITS[device_class]], + sort=True, + custom_value=True, + ) + ) COMMON_ENTITY_FIELDS = { CONF_PLATFORM: PlatformField( - SUBENTRY_PLATFORM_SELECTOR, True, str, exclude_from_reconfig=True + selector=SUBENTRY_PLATFORM_SELECTOR, + required=True, + validator=str, + exclude_from_reconfig=True, + ), + CONF_NAME: PlatformField( + selector=TEXT_SELECTOR, + required=False, + validator=str, + exclude_from_reconfig=True, + ), + CONF_ENTITY_PICTURE: PlatformField( + selector=TEXT_SELECTOR, required=False, validator=cv.url, error="invalid_url" ), - CONF_NAME: PlatformField(TEXT_SELECTOR, False, str, exclude_from_reconfig=True), - CONF_ENTITY_PICTURE: PlatformField(TEXT_SELECTOR, False, cv.url, "invalid_url"), } -COMMON_MQTT_FIELDS = { - CONF_QOS: PlatformField(QOS_SELECTOR, False, valid_qos_schema, default=0), - CONF_RETAIN: PlatformField(BOOLEAN_SELECTOR, False, bool), +PLATFORM_ENTITY_FIELDS = { + Platform.NOTIFY.value: {}, + Platform.SENSOR.value: { + CONF_DEVICE_CLASS: PlatformField( + selector=SENSOR_DEVICE_CLASS_SELECTOR, required=False, validator=str + ), + CONF_STATE_CLASS: PlatformField( + selector=SENSOR_STATE_CLASS_SELECTOR, required=False, validator=str + ), + CONF_UNIT_OF_MEASUREMENT: PlatformField( + selector=unit_of_measurement_selector, + required=False, + validator=str, + custom_filtering=True, + ), + CONF_SUGGESTED_DISPLAY_PRECISION: PlatformField( + selector=SUGGESTED_DISPLAY_PRECISION_SELECTOR, + required=False, + validator=cv.positive_int, + section="advanced_settings", + ), + CONF_OPTIONS: PlatformField( + selector=OPTIONS_SELECTOR, + required=False, + validator=cv.ensure_list, + conditions=({"device_class": "enum"},), + ), + }, + Platform.SWITCH.value: { + CONF_DEVICE_CLASS: PlatformField( + selector=SWITCH_DEVICE_CLASS_SELECTOR, required=False, validator=str + ), + }, } PLATFORM_MQTT_FIELDS = { Platform.NOTIFY.value: { CONF_COMMAND_TOPIC: PlatformField( - TEXT_SELECTOR, True, valid_publish_topic, "invalid_publish_topic" + selector=TEXT_SELECTOR, + required=True, + validator=valid_publish_topic, + error="invalid_publish_topic", ), CONF_COMMAND_TEMPLATE: PlatformField( - TEMPLATE_SELECTOR, False, 
cv.template, "invalid_template" + selector=TEMPLATE_SELECTOR, + required=False, + validator=cv.template, + error="invalid_template", + ), + CONF_RETAIN: PlatformField( + selector=BOOLEAN_SELECTOR, required=False, validator=bool + ), + }, + Platform.SENSOR.value: { + CONF_STATE_TOPIC: PlatformField( + selector=TEXT_SELECTOR, + required=True, + validator=valid_subscribe_topic, + error="invalid_subscribe_topic", + ), + CONF_VALUE_TEMPLATE: PlatformField( + selector=TEMPLATE_SELECTOR, + required=False, + validator=cv.template, + error="invalid_template", + ), + CONF_LAST_RESET_VALUE_TEMPLATE: PlatformField( + selector=TEMPLATE_SELECTOR, + required=False, + validator=cv.template, + error="invalid_template", + conditions=({CONF_STATE_CLASS: "total"},), + ), + CONF_EXPIRE_AFTER: PlatformField( + selector=EXPIRE_AFTER_SELECTOR, + required=False, + validator=cv.positive_int, + section="advanced_settings", + ), + }, + Platform.SWITCH.value: { + CONF_COMMAND_TOPIC: PlatformField( + selector=TEXT_SELECTOR, + required=True, + validator=valid_publish_topic, + error="invalid_publish_topic", + ), + CONF_COMMAND_TEMPLATE: PlatformField( + selector=TEMPLATE_SELECTOR, + required=False, + validator=cv.template, + error="invalid_template", + ), + CONF_STATE_TOPIC: PlatformField( + selector=TEXT_SELECTOR, + required=False, + validator=valid_subscribe_topic, + error="invalid_subscribe_topic", + ), + CONF_VALUE_TEMPLATE: PlatformField( + selector=TEMPLATE_SELECTOR, + required=False, + validator=cv.template, + error="invalid_template", + ), + CONF_RETAIN: PlatformField( + selector=BOOLEAN_SELECTOR, required=False, validator=bool + ), + CONF_OPTIMISTIC: PlatformField( + selector=BOOLEAN_SELECTOR, required=False, validator=bool ), }, } +ENTITY_CONFIG_VALIDATOR: dict[ + str, + Callable[[dict[str, Any]], dict[str, str]] | None, +] = { + Platform.NOTIFY.value: None, + Platform.SENSOR.value: validate_sensor_platform_config, + Platform.SWITCH.value: None, +} -MQTT_DEVICE_SCHEMA = vol.Schema( - { - vol.Required(ATTR_NAME): TEXT_SELECTOR, - vol.Optional(ATTR_SW_VERSION): TEXT_SELECTOR, - vol.Optional(ATTR_HW_VERSION): TEXT_SELECTOR, - vol.Optional(ATTR_MODEL): TEXT_SELECTOR, - vol.Optional(ATTR_MODEL_ID): TEXT_SELECTOR, - vol.Optional(ATTR_CONFIGURATION_URL): TEXT_SELECTOR, - } -) +MQTT_DEVICE_PLATFORM_FIELDS = { + ATTR_NAME: PlatformField(selector=TEXT_SELECTOR, required=False, validator=str), + ATTR_SW_VERSION: PlatformField( + selector=TEXT_SELECTOR, required=False, validator=str + ), + ATTR_HW_VERSION: PlatformField( + selector=TEXT_SELECTOR, required=False, validator=str + ), + ATTR_MODEL: PlatformField(selector=TEXT_SELECTOR, required=False, validator=str), + ATTR_MODEL_ID: PlatformField(selector=TEXT_SELECTOR, required=False, validator=str), + ATTR_CONFIGURATION_URL: PlatformField( + selector=TEXT_SELECTOR, required=False, validator=cv.url, error="invalid_url" + ), + CONF_QOS: PlatformField( + selector=QOS_SELECTOR, + required=False, + validator=int, + default=DEFAULT_QOS, + section="mqtt_settings", + ), +} REAUTH_SCHEMA = vol.Schema( { @@ -337,38 +580,151 @@ def validate_field( errors[field] = error +@callback +def _check_conditions( + platform_field: PlatformField, component_data: dict[str, Any] | None = None +) -> bool: + """Only include field if one of conditions match, or no conditions are set.""" + if platform_field.conditions is None or component_data is None: + return True + return any( + all(component_data.get(key) == value for key, value in condition.items()) + for condition in platform_field.conditions + 
) + + +@callback +def calculate_merged_config( + merged_user_input: dict[str, Any], + data_schema_fields: dict[str, PlatformField], + component_data: dict[str, Any], +) -> dict[str, Any]: + """Calculate merged config.""" + base_schema_fields = { + key + for key, platform_field in data_schema_fields.items() + if _check_conditions(platform_field, component_data) + } - set(merged_user_input) + return { + key: value + for key, value in component_data.items() + if key not in base_schema_fields + } | merged_user_input + + @callback def validate_user_input( user_input: dict[str, Any], data_schema_fields: dict[str, PlatformField], - errors: dict[str, str], -) -> None: + *, + component_data: dict[str, Any] | None = None, + config_validator: Callable[[dict[str, Any]], dict[str, str]] | None = None, +) -> tuple[dict[str, Any], dict[str, str]]: """Validate user input.""" - for field, value in user_input.items(): + errors: dict[str, str] = {} + # Merge sections + merged_user_input: dict[str, Any] = {} + for key, value in user_input.items(): + if isinstance(value, dict): + merged_user_input.update(value) + else: + merged_user_input[key] = value + + for field, value in merged_user_input.items(): validator = data_schema_fields[field].validator try: validator(value) except (ValueError, vol.Invalid): errors[field] = data_schema_fields[field].error or "invalid_input" + if config_validator is not None: + if TYPE_CHECKING: + assert component_data is not None + + errors |= config_validator( + calculate_merged_config( + merged_user_input, data_schema_fields, component_data + ), + ) + + return merged_user_input, errors + @callback def data_schema_from_fields( data_schema_fields: dict[str, PlatformField], reconfig: bool, + component_data: dict[str, Any] | None = None, + user_input: dict[str, Any] | None = None, + device_data: MqttDeviceData | None = None, ) -> vol.Schema: - """Generate data schema from platform fields.""" - return vol.Schema( - { + """Generate custom data schema from platform fields or device data.""" + if device_data is not None: + component_data_with_user_input: dict[str, Any] | None = dict(device_data) + if TYPE_CHECKING: + assert component_data_with_user_input is not None + component_data_with_user_input.update( + component_data_with_user_input.pop("mqtt_settings", {}) + ) + else: + component_data_with_user_input = deepcopy(component_data) + if component_data_with_user_input is not None and user_input is not None: + component_data_with_user_input |= user_input + + sections: dict[str | None, None] = { + field_details.section: None for field_details in data_schema_fields.values() + } + data_schema: dict[Any, Any] = {} + all_data_element_options: set[Any] = set() + no_reconfig_options: set[Any] = set() + for schema_section in sections: + data_schema_element = { vol.Required(field_name, default=field_details.default) if field_details.required else vol.Optional( field_name, default=field_details.default - ): field_details.selector + ): field_details.selector(component_data_with_user_input) # type: ignore[operator] + if field_details.custom_filtering + else field_details.selector for field_name, field_details in data_schema_fields.items() - if not field_details.exclude_from_reconfig or not reconfig + if field_details.section == schema_section + and (not field_details.exclude_from_reconfig or not reconfig) + and _check_conditions(field_details, component_data_with_user_input) } - ) + data_element_options = set(data_schema_element) + all_data_element_options |= data_element_options + 
no_reconfig_options |= { + field_name + for field_name, field_details in data_schema_fields.items() + if field_details.section == schema_section + and field_details.exclude_from_reconfig + } + if schema_section is None: + data_schema.update(data_schema_element) + continue + collapsed = ( + not any( + (default := data_schema_fields[str(option)].default) is vol.UNDEFINED + or component_data_with_user_input[str(option)] != default + for option in data_element_options + if option in component_data_with_user_input + ) + if component_data_with_user_input is not None + else True + ) + data_schema[vol.Optional(schema_section)] = section( + vol.Schema(data_schema_element), SectionConfig({"collapsed": collapsed}) + ) + + # Reset all fields from the component_data not in the schema + if component_data: + filtered_fields = ( + set(data_schema_fields) - all_data_element_options - no_reconfig_options + ) + for field in filtered_fields: + if field in component_data: + del component_data[field] + return vol.Schema(data_schema) class FlowHandler(ConfigFlow, domain=DOMAIN): @@ -849,7 +1205,7 @@ class MQTTOptionsFlowHandler(OptionsFlow): "birth_payload", description={"suggested_value": birth[CONF_PAYLOAD]} ) ] = TEXT_SELECTOR - fields[vol.Optional("birth_qos", default=birth[ATTR_QOS])] = QOS_DATA_SCHEMA + fields[vol.Optional("birth_qos", default=birth[ATTR_QOS])] = QOS_SELECTOR fields[vol.Optional("birth_retain", default=birth[ATTR_RETAIN])] = ( BOOLEAN_SELECTOR ) @@ -872,7 +1228,7 @@ class MQTTOptionsFlowHandler(OptionsFlow): "will_payload", description={"suggested_value": will[CONF_PAYLOAD]} ) ] = TEXT_SELECTOR - fields[vol.Optional("will_qos", default=will[ATTR_QOS])] = QOS_DATA_SCHEMA + fields[vol.Optional("will_qos", default=will[ATTR_QOS])] = QOS_SELECTOR fields[vol.Optional("will_retain", default=will[ATTR_RETAIN])] = ( BOOLEAN_SELECTOR ) @@ -893,20 +1249,56 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow): @callback def update_component_fields( - self, data_schema: vol.Schema, user_input: dict[str, Any] + self, + data_schema_fields: dict[str, PlatformField], + merged_user_input: dict[str, Any], ) -> None: """Update the componment fields.""" if TYPE_CHECKING: assert self._component_id is not None component_data = self._subentry_data["components"][self._component_id] - # Remove the fields from the component data if they are not in the user input - for field in [ - form_field - for form_field in data_schema.schema - if form_field in component_data and form_field not in user_input - ]: + # Remove the fields from the component data + # if they are not in the schema and not in the user input + config = calculate_merged_config( + merged_user_input, data_schema_fields, component_data + ) + for field in ( + field + for field, platform_field in data_schema_fields.items() + if field in (set(component_data) - set(config)) + and not platform_field.exclude_from_reconfig + ): component_data.pop(field) - component_data.update(user_input) + component_data.update(merged_user_input) + + @callback + def generate_names(self) -> tuple[str, str]: + """Generate the device and full entity name.""" + if TYPE_CHECKING: + assert self._component_id is not None + device_name = self._subentry_data[CONF_DEVICE][CONF_NAME] + if entity_name := self._subentry_data["components"][self._component_id].get( + CONF_NAME + ): + full_entity_name: str = f"{device_name} {entity_name}" + else: + full_entity_name = device_name + return device_name, full_entity_name + + @callback + def get_suggested_values_from_component( + self, data_schema: 
vol.Schema + ) -> dict[str, Any]: + """Get suggestions from component data based on the data schema.""" + if TYPE_CHECKING: + assert self._component_id is not None + component_data = self._subentry_data["components"][self._component_id] + return { + field_key: self.get_suggested_values_from_component(value.schema) + if isinstance(value, section) + else component_data.get(field_key) + for field_key, value in data_schema.schema.items() + } async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -929,17 +1321,22 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow): self, user_input: dict[str, Any] | None = None ) -> SubentryFlowResult: """Add a new MQTT device.""" - errors: dict[str, str] = {} - validate_field("configuration_url", cv.url, user_input, errors, "invalid_url") - if not errors and user_input is not None: - self._subentry_data[CONF_DEVICE] = cast(MqttDeviceData, user_input) - if self.source == SOURCE_RECONFIGURE: - return await self.async_step_summary_menu() - return await self.async_step_entity() - + errors: dict[str, Any] = {} + device_data = self._subentry_data[CONF_DEVICE] + data_schema = data_schema_from_fields( + MQTT_DEVICE_PLATFORM_FIELDS, + device_data=device_data, + reconfig=True, + ) + if user_input is not None: + _, errors = validate_user_input(user_input, MQTT_DEVICE_PLATFORM_FIELDS) + if not errors: + self._subentry_data[CONF_DEVICE] = cast(MqttDeviceData, user_input) + if self.source == SOURCE_RECONFIGURE: + return await self.async_step_summary_menu() + return await self.async_step_entity() data_schema = self.add_suggested_values_to_schema( - MQTT_DEVICE_SCHEMA, - self._subentry_data[CONF_DEVICE] if user_input is None else user_input, + data_schema, device_data if user_input is None else user_input ) return self.async_show_form( step_id=CONF_DEVICE, @@ -956,25 +1353,28 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow): data_schema_fields = COMMON_ENTITY_FIELDS entity_name_label: str = "" platform_label: str = "" + component_data: dict[str, Any] | None = None if reconfig := (self._component_id is not None): - name: str | None = self._subentry_data["components"][ - self._component_id - ].get(CONF_NAME) + component_data = self._subentry_data["components"][self._component_id] + name: str | None = component_data.get(CONF_NAME) platform_label = f"{self._subentry_data['components'][self._component_id][CONF_PLATFORM]} " entity_name_label = f" ({name})" if name is not None else "" data_schema = data_schema_from_fields(data_schema_fields, reconfig=reconfig) if user_input is not None: - validate_user_input(user_input, data_schema_fields, errors) + merged_user_input, errors = validate_user_input( + user_input, data_schema_fields, component_data=component_data + ) if not errors: if self._component_id is None: self._component_id = uuid4().hex self._subentry_data["components"].setdefault(self._component_id, {}) - self.update_component_fields(data_schema, user_input) - return await self.async_step_mqtt_platform_config() + self.update_component_fields(data_schema_fields, merged_user_input) + return await self.async_step_entity_platform_config() data_schema = self.add_suggested_values_to_schema(data_schema, user_input) elif self.source == SOURCE_RECONFIGURE and self._component_id is not None: data_schema = self.add_suggested_values_to_schema( - data_schema, self._subentry_data["components"][self._component_id] + data_schema, + self.get_suggested_values_from_component(data_schema), ) device_name = self._subentry_data[CONF_DEVICE][CONF_NAME] return 
self.async_show_form( @@ -994,9 +1394,11 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow): device_name = self._subentry_data[CONF_DEVICE][CONF_NAME] entities = [ SelectOptionDict( - value=key, label=f"{device_name} {component.get(CONF_NAME, '-')}" + value=key, + label=f"{device_name} {component_data.get(CONF_NAME, '-')}" + f" ({component_data[CONF_PLATFORM]})", ) - for key, component in self._subentry_data["components"].items() + for key, component_data in self._subentry_data["components"].items() ] data_schema = vol.Schema( { @@ -1034,6 +1436,61 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow): return await self.async_step_summary_menu() return self._show_update_or_delete_form("delete_entity") + async def async_step_entity_platform_config( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Configure platform entity details.""" + if TYPE_CHECKING: + assert self._component_id is not None + component_data = self._subentry_data["components"][self._component_id] + platform = component_data[CONF_PLATFORM] + data_schema_fields = PLATFORM_ENTITY_FIELDS[platform] + errors: dict[str, str] = {} + + data_schema = data_schema_from_fields( + data_schema_fields, + reconfig=bool( + {field for field in data_schema_fields if field in component_data} + ), + component_data=component_data, + user_input=user_input, + ) + if not data_schema.schema: + return await self.async_step_mqtt_platform_config() + if user_input is not None: + # Test entity fields against the validator + merged_user_input, errors = validate_user_input( + user_input, + data_schema_fields, + component_data=component_data, + config_validator=ENTITY_CONFIG_VALIDATOR[platform], + ) + if not errors: + self.update_component_fields(data_schema_fields, merged_user_input) + return await self.async_step_mqtt_platform_config() + + data_schema = self.add_suggested_values_to_schema(data_schema, user_input) + else: + data_schema = self.add_suggested_values_to_schema( + data_schema, + self.get_suggested_values_from_component(data_schema), + ) + + device_name, full_entity_name = self.generate_names() + return self.async_show_form( + step_id="entity_platform_config", + data_schema=data_schema, + description_placeholders={ + "mqtt_device": device_name, + CONF_PLATFORM: platform, + "entity": full_entity_name, + "url": learn_more_url(platform), + } + | (user_input or {}), + errors=errors, + last_step=False, + ) + async def async_step_mqtt_platform_config( self, user_input: dict[str, Any] | None = None ) -> SubentryFlowResult: @@ -1041,16 +1498,26 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow): errors: dict[str, str] = {} if TYPE_CHECKING: assert self._component_id is not None - platform = self._subentry_data["components"][self._component_id][CONF_PLATFORM] - data_schema_fields = PLATFORM_MQTT_FIELDS[platform] | COMMON_MQTT_FIELDS + component_data = self._subentry_data["components"][self._component_id] + platform = component_data[CONF_PLATFORM] + data_schema_fields = PLATFORM_MQTT_FIELDS[platform] data_schema = data_schema_from_fields( - data_schema_fields, reconfig=self._component_id is not None + data_schema_fields, + reconfig=bool( + {field for field in data_schema_fields if field in component_data} + ), + component_data=component_data, ) if user_input is not None: # Test entity fields against the validator - validate_user_input(user_input, data_schema_fields, errors) + merged_user_input, errors = validate_user_input( + user_input, + data_schema_fields, + component_data=component_data, + 
config_validator=ENTITY_CONFIG_VALIDATOR[platform], + ) if not errors: - self.update_component_fields(data_schema, user_input) + self.update_component_fields(data_schema_fields, merged_user_input) self._component_id = None if self.source == SOURCE_RECONFIGURE: return await self.async_step_summary_menu() @@ -1059,16 +1526,10 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow): data_schema = self.add_suggested_values_to_schema(data_schema, user_input) else: data_schema = self.add_suggested_values_to_schema( - data_schema, self._subentry_data["components"][self._component_id] + data_schema, + self.get_suggested_values_from_component(data_schema), ) - device_name = self._subentry_data[CONF_DEVICE][CONF_NAME] - entity_name: str | None - if entity_name := self._subentry_data["components"][self._component_id].get( - CONF_NAME - ): - full_entity_name: str = f"{device_name} {entity_name}" - else: - full_entity_name = device_name + device_name, full_entity_name = self.generate_names() return self.async_show_form( step_id="mqtt_platform_config", data_schema=data_schema, @@ -1076,6 +1537,7 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow): "mqtt_device": device_name, CONF_PLATFORM: platform, "entity": full_entity_name, + "url": learn_more_url(platform), }, errors=errors, last_step=False, @@ -1087,12 +1549,12 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow): ) -> SubentryFlowResult: """Create a subentry for a new MQTT device.""" device_name = self._subentry_data[CONF_DEVICE][CONF_NAME] - component: dict[str, Any] = next( + component_data: dict[str, Any] = next( iter(self._subentry_data["components"].values()) ) - platform = component[CONF_PLATFORM] + platform = component_data[CONF_PLATFORM] entity_name: str | None - if entity_name := component.get(CONF_NAME): + if entity_name := component_data.get(CONF_NAME): full_entity_name: str = f"{device_name} {entity_name}" else: full_entity_name = device_name @@ -1151,8 +1613,8 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow): self._component_id = None mqtt_device = self._subentry_data[CONF_DEVICE][CONF_NAME] mqtt_items = ", ".join( - f"{mqtt_device} {component.get(CONF_NAME, '-')}" - for component in self._subentry_data["components"].values() + f"{mqtt_device} {component_data.get(CONF_NAME, '-')} ({component_data[CONF_PLATFORM]})" + for component_data in self._subentry_data["components"].values() ) menu_options = [ "entity", diff --git a/homeassistant/components/mqtt/const.py b/homeassistant/components/mqtt/const.py index 007b3b7e576..b2fcd492435 100644 --- a/homeassistant/components/mqtt/const.py +++ b/homeassistant/components/mqtt/const.py @@ -56,20 +56,55 @@ CONF_SUPPORTED_FEATURES = "supported_features" CONF_ACTION_TEMPLATE = "action_template" CONF_ACTION_TOPIC = "action_topic" +CONF_BLUE_TEMPLATE = "blue_template" +CONF_BRIGHTNESS_COMMAND_TEMPLATE = "brightness_command_template" +CONF_BRIGHTNESS_COMMAND_TOPIC = "brightness_command_topic" +CONF_BRIGHTNESS_SCALE = "brightness_scale" +CONF_BRIGHTNESS_STATE_TOPIC = "brightness_state_topic" +CONF_BRIGHTNESS_TEMPLATE = "brightness_template" +CONF_BRIGHTNESS_VALUE_TEMPLATE = "brightness_value_template" +CONF_COLOR_MODE = "color_mode" +CONF_COLOR_MODE_STATE_TOPIC = "color_mode_state_topic" +CONF_COLOR_MODE_VALUE_TEMPLATE = "color_mode_value_template" +CONF_COLOR_TEMP_COMMAND_TEMPLATE = "color_temp_command_template" +CONF_COLOR_TEMP_COMMAND_TOPIC = "color_temp_command_topic" CONF_COLOR_TEMP_KELVIN = "color_temp_kelvin" +CONF_COLOR_TEMP_TEMPLATE = "color_temp_template" +CONF_COLOR_TEMP_STATE_TOPIC = 
"color_temp_state_topic" +CONF_COLOR_TEMP_VALUE_TEMPLATE = "color_temp_value_template" +CONF_COMMAND_OFF_TEMPLATE = "command_off_template" +CONF_COMMAND_ON_TEMPLATE = "command_on_template" CONF_CURRENT_HUMIDITY_TEMPLATE = "current_humidity_template" CONF_CURRENT_HUMIDITY_TOPIC = "current_humidity_topic" CONF_CURRENT_TEMP_TEMPLATE = "current_temperature_template" CONF_CURRENT_TEMP_TOPIC = "current_temperature_topic" CONF_ENABLED_BY_DEFAULT = "enabled_by_default" +CONF_EFFECT_COMMAND_TEMPLATE = "effect_command_template" +CONF_EFFECT_COMMAND_TOPIC = "effect_command_topic" +CONF_EFFECT_LIST = "effect_list" +CONF_EFFECT_STATE_TOPIC = "effect_state_topic" +CONF_EFFECT_TEMPLATE = "effect_template" +CONF_EFFECT_VALUE_TEMPLATE = "effect_value_template" CONF_ENTITY_PICTURE = "entity_picture" +CONF_EXPIRE_AFTER = "expire_after" +CONF_FLASH_TIME_LONG = "flash_time_long" +CONF_FLASH_TIME_SHORT = "flash_time_short" +CONF_GREEN_TEMPLATE = "green_template" +CONF_HS_COMMAND_TEMPLATE = "hs_command_template" +CONF_HS_COMMAND_TOPIC = "hs_command_topic" +CONF_HS_STATE_TOPIC = "hs_state_topic" +CONF_HS_VALUE_TEMPLATE = "hs_value_template" +CONF_LAST_RESET_VALUE_TEMPLATE = "last_reset_value_template" CONF_MAX_KELVIN = "max_kelvin" +CONF_MAX_MIREDS = "max_mireds" CONF_MIN_KELVIN = "min_kelvin" +CONF_MIN_MIREDS = "min_mireds" CONF_MODE_COMMAND_TEMPLATE = "mode_command_template" CONF_MODE_COMMAND_TOPIC = "mode_command_topic" CONF_MODE_LIST = "modes" CONF_MODE_STATE_TEMPLATE = "mode_state_template" CONF_MODE_STATE_TOPIC = "mode_state_topic" +CONF_ON_COMMAND_TYPE = "on_command_type" CONF_PAYLOAD_CLOSE = "payload_close" CONF_PAYLOAD_OPEN = "payload_open" CONF_PAYLOAD_STOP = "payload_stop" @@ -78,10 +113,25 @@ CONF_POSITION_OPEN = "position_open" CONF_POWER_COMMAND_TOPIC = "power_command_topic" CONF_POWER_COMMAND_TEMPLATE = "power_command_template" CONF_PRECISION = "precision" +CONF_RED_TEMPLATE = "red_template" +CONF_RGB_COMMAND_TEMPLATE = "rgb_command_template" +CONF_RGB_COMMAND_TOPIC = "rgb_command_topic" +CONF_RGB_STATE_TOPIC = "rgb_state_topic" +CONF_RGB_VALUE_TEMPLATE = "rgb_value_template" +CONF_RGBW_COMMAND_TEMPLATE = "rgbw_command_template" +CONF_RGBW_COMMAND_TOPIC = "rgbw_command_topic" +CONF_RGBW_STATE_TOPIC = "rgbw_state_topic" +CONF_RGBW_VALUE_TEMPLATE = "rgbw_value_template" +CONF_RGBWW_COMMAND_TEMPLATE = "rgbww_command_template" +CONF_RGBWW_COMMAND_TOPIC = "rgbww_command_topic" +CONF_RGBWW_STATE_TOPIC = "rgbww_state_topic" +CONF_RGBWW_VALUE_TEMPLATE = "rgbww_value_template" CONF_STATE_CLOSED = "state_closed" CONF_STATE_CLOSING = "state_closing" CONF_STATE_OPEN = "state_open" CONF_STATE_OPENING = "state_opening" +CONF_SUGGESTED_DISPLAY_PRECISION = "suggested_display_precision" +CONF_SUPPORTED_COLOR_MODES = "supported_color_modes" CONF_TEMP_COMMAND_TEMPLATE = "temperature_command_template" CONF_TEMP_COMMAND_TOPIC = "temperature_command_topic" CONF_TEMP_STATE_TEMPLATE = "temperature_state_template" @@ -89,7 +139,14 @@ CONF_TEMP_STATE_TOPIC = "temperature_state_topic" CONF_TEMP_INITIAL = "initial" CONF_TEMP_MAX = "max_temp" CONF_TEMP_MIN = "min_temp" +CONF_XY_COMMAND_TEMPLATE = "xy_command_template" +CONF_XY_COMMAND_TOPIC = "xy_command_topic" +CONF_XY_STATE_TOPIC = "xy_state_topic" +CONF_XY_VALUE_TEMPLATE = "xy_value_template" +CONF_WHITE_COMMAND_TOPIC = "white_command_topic" +CONF_WHITE_SCALE = "white_scale" +# Config flow constants CONF_CERTIFICATE = "certificate" CONF_CLIENT_KEY = "client_key" CONF_CLIENT_CERT = "client_cert" @@ -110,15 +167,23 @@ CONF_CONFIGURATION_URL = "configuration_url" 
CONF_OBJECT_ID = "object_id" CONF_SUPPORT_URL = "support_url" +DEFAULT_BRIGHTNESS = False +DEFAULT_BRIGHTNESS_SCALE = 255 DEFAULT_PREFIX = "homeassistant" DEFAULT_BIRTH_WILL_TOPIC = DEFAULT_PREFIX + "/status" DEFAULT_DISCOVERY = True +DEFAULT_EFFECT = False DEFAULT_ENCODING = "utf-8" +DEFAULT_FLASH_TIME_LONG = 10 +DEFAULT_FLASH_TIME_SHORT = 2 DEFAULT_OPTIMISTIC = False +DEFAULT_ON_COMMAND_TYPE = "last" DEFAULT_QOS = 0 DEFAULT_PAYLOAD_AVAILABLE = "online" DEFAULT_PAYLOAD_CLOSE = "CLOSE" DEFAULT_PAYLOAD_NOT_AVAILABLE = "offline" +DEFAULT_PAYLOAD_OFF = "OFF" +DEFAULT_PAYLOAD_ON = "ON" DEFAULT_PAYLOAD_OPEN = "OPEN" DEFAULT_PORT = 1883 DEFAULT_RETAIN = False @@ -127,6 +192,7 @@ DEFAULT_WS_PATH = "/" DEFAULT_POSITION_CLOSED = 0 DEFAULT_POSITION_OPEN = 100 DEFAULT_RETAIN = False +DEFAULT_WHITE_SCALE = 255 PROTOCOL_31 = "3.1" PROTOCOL_311 = "3.1.1" diff --git a/homeassistant/components/mqtt/entity.py b/homeassistant/components/mqtt/entity.py index 0b4f65fab47..8446f9041c9 100644 --- a/homeassistant/components/mqtt/entity.py +++ b/homeassistant/components/mqtt/entity.py @@ -123,7 +123,7 @@ from .subscription import ( async_subscribe_topics_internal, async_unsubscribe_topics, ) -from .util import mqtt_config_entry_enabled +from .util import learn_more_url, mqtt_config_entry_enabled _LOGGER = logging.getLogger(__name__) @@ -300,6 +300,7 @@ def async_setup_entity_entry_helper( availability_config = subentry_data.get("availability", {}) subentry_entities: list[Entity] = [] device_config = subentry_data["device"].copy() + device_mqtt_options = device_config.pop("mqtt_settings", {}) device_config["identifiers"] = config_subentry_id for component_id, component_data in subentry_data["components"].items(): if component_data["platform"] != domain: @@ -311,6 +312,7 @@ def async_setup_entity_entry_helper( component_config[CONF_DEVICE] = device_config component_config.pop("platform") component_config.update(availability_config) + component_config.update(device_mqtt_options) try: config = platform_schema_modern(component_config) @@ -346,9 +348,6 @@ def async_setup_entity_entry_helper( line = getattr(yaml_config, "__line__", "?") issue_id = hex(hash(frozenset(yaml_config))) yaml_config_str = yaml_dump(yaml_config) - learn_more_url = ( - f"https://www.home-assistant.io/integrations/{domain}.mqtt/" - ) async_create_issue( hass, DOMAIN, @@ -356,7 +355,7 @@ def async_setup_entity_entry_helper( issue_domain=domain, is_fixable=False, severity=IssueSeverity.ERROR, - learn_more_url=learn_more_url, + learn_more_url=learn_more_url(domain), translation_placeholders={ "domain": domain, "config_file": config_file, diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index a2f424b247d..a950aced665 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -51,12 +51,58 @@ from homeassistant.util import color as color_util from .. 
import subscription from ..config import MQTT_RW_SCHEMA from ..const import ( + CONF_BRIGHTNESS_COMMAND_TEMPLATE, + CONF_BRIGHTNESS_COMMAND_TOPIC, + CONF_BRIGHTNESS_SCALE, + CONF_BRIGHTNESS_STATE_TOPIC, + CONF_BRIGHTNESS_VALUE_TEMPLATE, + CONF_COLOR_MODE_STATE_TOPIC, + CONF_COLOR_MODE_VALUE_TEMPLATE, + CONF_COLOR_TEMP_COMMAND_TEMPLATE, + CONF_COLOR_TEMP_COMMAND_TOPIC, CONF_COLOR_TEMP_KELVIN, + CONF_COLOR_TEMP_STATE_TOPIC, + CONF_COLOR_TEMP_VALUE_TEMPLATE, CONF_COMMAND_TOPIC, + CONF_EFFECT_COMMAND_TEMPLATE, + CONF_EFFECT_COMMAND_TOPIC, + CONF_EFFECT_LIST, + CONF_EFFECT_STATE_TOPIC, + CONF_EFFECT_VALUE_TEMPLATE, + CONF_HS_COMMAND_TEMPLATE, + CONF_HS_COMMAND_TOPIC, + CONF_HS_STATE_TOPIC, + CONF_HS_VALUE_TEMPLATE, CONF_MAX_KELVIN, + CONF_MAX_MIREDS, CONF_MIN_KELVIN, + CONF_MIN_MIREDS, + CONF_ON_COMMAND_TYPE, + CONF_RGB_COMMAND_TEMPLATE, + CONF_RGB_COMMAND_TOPIC, + CONF_RGB_STATE_TOPIC, + CONF_RGB_VALUE_TEMPLATE, + CONF_RGBW_COMMAND_TEMPLATE, + CONF_RGBW_COMMAND_TOPIC, + CONF_RGBW_STATE_TOPIC, + CONF_RGBW_VALUE_TEMPLATE, + CONF_RGBWW_COMMAND_TEMPLATE, + CONF_RGBWW_COMMAND_TOPIC, + CONF_RGBWW_STATE_TOPIC, + CONF_RGBWW_VALUE_TEMPLATE, CONF_STATE_TOPIC, CONF_STATE_VALUE_TEMPLATE, + CONF_WHITE_COMMAND_TOPIC, + CONF_WHITE_SCALE, + CONF_XY_COMMAND_TEMPLATE, + CONF_XY_COMMAND_TOPIC, + CONF_XY_STATE_TOPIC, + CONF_XY_VALUE_TEMPLATE, + DEFAULT_BRIGHTNESS_SCALE, + DEFAULT_ON_COMMAND_TYPE, + DEFAULT_PAYLOAD_OFF, + DEFAULT_PAYLOAD_ON, + DEFAULT_WHITE_SCALE, PAYLOAD_NONE, ) from ..entity import MqttEntity @@ -74,47 +120,7 @@ from .schema import MQTT_LIGHT_SCHEMA_SCHEMA _LOGGER = logging.getLogger(__name__) -CONF_BRIGHTNESS_COMMAND_TEMPLATE = "brightness_command_template" -CONF_BRIGHTNESS_COMMAND_TOPIC = "brightness_command_topic" -CONF_BRIGHTNESS_SCALE = "brightness_scale" -CONF_BRIGHTNESS_STATE_TOPIC = "brightness_state_topic" -CONF_BRIGHTNESS_VALUE_TEMPLATE = "brightness_value_template" -CONF_COLOR_MODE_STATE_TOPIC = "color_mode_state_topic" -CONF_COLOR_MODE_VALUE_TEMPLATE = "color_mode_value_template" -CONF_COLOR_TEMP_COMMAND_TEMPLATE = "color_temp_command_template" -CONF_COLOR_TEMP_COMMAND_TOPIC = "color_temp_command_topic" -CONF_COLOR_TEMP_STATE_TOPIC = "color_temp_state_topic" -CONF_COLOR_TEMP_VALUE_TEMPLATE = "color_temp_value_template" -CONF_EFFECT_COMMAND_TEMPLATE = "effect_command_template" -CONF_EFFECT_COMMAND_TOPIC = "effect_command_topic" -CONF_EFFECT_LIST = "effect_list" -CONF_EFFECT_STATE_TOPIC = "effect_state_topic" -CONF_EFFECT_VALUE_TEMPLATE = "effect_value_template" -CONF_HS_COMMAND_TEMPLATE = "hs_command_template" -CONF_HS_COMMAND_TOPIC = "hs_command_topic" -CONF_HS_STATE_TOPIC = "hs_state_topic" -CONF_HS_VALUE_TEMPLATE = "hs_value_template" -CONF_MAX_MIREDS = "max_mireds" -CONF_MIN_MIREDS = "min_mireds" -CONF_RGB_COMMAND_TEMPLATE = "rgb_command_template" -CONF_RGB_COMMAND_TOPIC = "rgb_command_topic" -CONF_RGB_STATE_TOPIC = "rgb_state_topic" -CONF_RGB_VALUE_TEMPLATE = "rgb_value_template" -CONF_RGBW_COMMAND_TEMPLATE = "rgbw_command_template" -CONF_RGBW_COMMAND_TOPIC = "rgbw_command_topic" -CONF_RGBW_STATE_TOPIC = "rgbw_state_topic" -CONF_RGBW_VALUE_TEMPLATE = "rgbw_value_template" -CONF_RGBWW_COMMAND_TEMPLATE = "rgbww_command_template" -CONF_RGBWW_COMMAND_TOPIC = "rgbww_command_topic" -CONF_RGBWW_STATE_TOPIC = "rgbww_state_topic" -CONF_RGBWW_VALUE_TEMPLATE = "rgbww_value_template" -CONF_XY_COMMAND_TEMPLATE = "xy_command_template" -CONF_XY_COMMAND_TOPIC = "xy_command_topic" -CONF_XY_STATE_TOPIC = "xy_state_topic" -CONF_XY_VALUE_TEMPLATE = "xy_value_template" -CONF_WHITE_COMMAND_TOPIC = 
"white_command_topic" -CONF_WHITE_SCALE = "white_scale" -CONF_ON_COMMAND_TYPE = "on_command_type" +DEFAULT_NAME = "MQTT LightEntity" MQTT_LIGHT_ATTRIBUTES_BLOCKED = frozenset( { @@ -137,13 +143,6 @@ MQTT_LIGHT_ATTRIBUTES_BLOCKED = frozenset( } ) -DEFAULT_BRIGHTNESS_SCALE = 255 -DEFAULT_NAME = "MQTT LightEntity" -DEFAULT_PAYLOAD_OFF = "OFF" -DEFAULT_PAYLOAD_ON = "ON" -DEFAULT_WHITE_SCALE = 255 -DEFAULT_ON_COMMAND_TYPE = "last" - VALUES_ON_COMMAND_TYPE = ["first", "last", "brightness"] COMMAND_TEMPLATE_KEYS = [ diff --git a/homeassistant/components/mqtt/light/schema_json.py b/homeassistant/components/mqtt/light/schema_json.py index d18da9e917a..a1f86278cf0 100644 --- a/homeassistant/components/mqtt/light/schema_json.py +++ b/homeassistant/components/mqtt/light/schema_json.py @@ -55,13 +55,26 @@ from homeassistant.util.json import json_loads_object from .. import subscription from ..config import DEFAULT_QOS, DEFAULT_RETAIN, MQTT_RW_SCHEMA from ..const import ( + CONF_COLOR_MODE, CONF_COLOR_TEMP_KELVIN, CONF_COMMAND_TOPIC, + CONF_EFFECT_LIST, + CONF_FLASH_TIME_LONG, + CONF_FLASH_TIME_SHORT, CONF_MAX_KELVIN, + CONF_MAX_MIREDS, CONF_MIN_KELVIN, + CONF_MIN_MIREDS, CONF_QOS, CONF_RETAIN, CONF_STATE_TOPIC, + CONF_SUPPORTED_COLOR_MODES, + DEFAULT_BRIGHTNESS, + DEFAULT_BRIGHTNESS_SCALE, + DEFAULT_EFFECT, + DEFAULT_FLASH_TIME_LONG, + DEFAULT_FLASH_TIME_SHORT, + DEFAULT_WHITE_SCALE, ) from ..entity import MqttEntity from ..models import ReceiveMessage @@ -78,25 +91,7 @@ _LOGGER = logging.getLogger(__name__) DOMAIN = "mqtt_json" -DEFAULT_BRIGHTNESS = False -DEFAULT_EFFECT = False -DEFAULT_FLASH_TIME_LONG = 10 -DEFAULT_FLASH_TIME_SHORT = 2 DEFAULT_NAME = "MQTT JSON Light" -DEFAULT_BRIGHTNESS_SCALE = 255 -DEFAULT_WHITE_SCALE = 255 - -CONF_COLOR_MODE = "color_mode" -CONF_SUPPORTED_COLOR_MODES = "supported_color_modes" - -CONF_EFFECT_LIST = "effect_list" - -CONF_FLASH_TIME_LONG = "flash_time_long" -CONF_FLASH_TIME_SHORT = "flash_time_short" - -CONF_MAX_MIREDS = "max_mireds" -CONF_MIN_MIREDS = "min_mireds" - _PLATFORM_SCHEMA_BASE = ( MQTT_RW_SCHEMA.extend( diff --git a/homeassistant/components/mqtt/light/schema_template.py b/homeassistant/components/mqtt/light/schema_template.py index 901cee6f14c..595f072416b 100644 --- a/homeassistant/components/mqtt/light/schema_template.py +++ b/homeassistant/components/mqtt/light/schema_template.py @@ -40,10 +40,21 @@ from homeassistant.util import color as color_util from .. 
import subscription from ..config import MQTT_RW_SCHEMA from ..const import ( + CONF_BLUE_TEMPLATE, + CONF_BRIGHTNESS_TEMPLATE, CONF_COLOR_TEMP_KELVIN, + CONF_COLOR_TEMP_TEMPLATE, + CONF_COMMAND_OFF_TEMPLATE, + CONF_COMMAND_ON_TEMPLATE, CONF_COMMAND_TOPIC, + CONF_EFFECT_LIST, + CONF_EFFECT_TEMPLATE, + CONF_GREEN_TEMPLATE, CONF_MAX_KELVIN, + CONF_MAX_MIREDS, CONF_MIN_KELVIN, + CONF_MIN_MIREDS, + CONF_RED_TEMPLATE, CONF_STATE_TOPIC, PAYLOAD_NONE, ) @@ -64,18 +75,6 @@ DOMAIN = "mqtt_template" DEFAULT_NAME = "MQTT Template Light" -CONF_BLUE_TEMPLATE = "blue_template" -CONF_BRIGHTNESS_TEMPLATE = "brightness_template" -CONF_COLOR_TEMP_TEMPLATE = "color_temp_template" -CONF_COMMAND_OFF_TEMPLATE = "command_off_template" -CONF_COMMAND_ON_TEMPLATE = "command_on_template" -CONF_EFFECT_LIST = "effect_list" -CONF_EFFECT_TEMPLATE = "effect_template" -CONF_GREEN_TEMPLATE = "green_template" -CONF_MAX_MIREDS = "max_mireds" -CONF_MIN_MIREDS = "min_mireds" -CONF_RED_TEMPLATE = "red_template" - COMMAND_TEMPLATES = (CONF_COMMAND_ON_TEMPLATE, CONF_COMMAND_OFF_TEMPLATE) VALUE_TEMPLATES = ( CONF_BLUE_TEMPLATE, diff --git a/homeassistant/components/mqtt/models.py b/homeassistant/components/mqtt/models.py index bcfe94bbd58..8a42797b0f2 100644 --- a/homeassistant/components/mqtt/models.py +++ b/homeassistant/components/mqtt/models.py @@ -420,6 +420,12 @@ class MqttComponentConfig: discovery_payload: MQTTDiscoveryPayload +class DeviceMqttOptions(TypedDict, total=False): + """Hold the shared MQTT specific options for an MQTT device.""" + + qos: int + + class MqttDeviceData(TypedDict, total=False): """Hold the data for an MQTT device.""" @@ -430,6 +436,7 @@ class MqttDeviceData(TypedDict, total=False): hw_version: str model: str model_id: str + mqtt_settings: DeviceMqttOptions class MqttAvailabilityData(TypedDict, total=False): diff --git a/homeassistant/components/mqtt/sensor.py b/homeassistant/components/mqtt/sensor.py index 4d67b0d56e6..b27ef68368a 100644 --- a/homeassistant/components/mqtt/sensor.py +++ b/homeassistant/components/mqtt/sensor.py @@ -41,7 +41,15 @@ from homeassistant.util import dt as dt_util from . 
import subscription from .config import MQTT_RO_SCHEMA -from .const import CONF_OPTIONS, CONF_STATE_TOPIC, DOMAIN, PAYLOAD_NONE +from .const import ( + CONF_EXPIRE_AFTER, + CONF_LAST_RESET_VALUE_TEMPLATE, + CONF_OPTIONS, + CONF_STATE_TOPIC, + CONF_SUGGESTED_DISPLAY_PRECISION, + DOMAIN, + PAYLOAD_NONE, +) from .entity import MqttAvailabilityMixin, MqttEntity, async_setup_entity_entry_helper from .models import MqttValueTemplate, PayloadSentinel, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA @@ -51,10 +59,6 @@ _LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 0 -CONF_EXPIRE_AFTER = "expire_after" -CONF_LAST_RESET_VALUE_TEMPLATE = "last_reset_value_template" -CONF_SUGGESTED_DISPLAY_PRECISION = "suggested_display_precision" - MQTT_SENSOR_ATTRIBUTES_BLOCKED = frozenset( { sensor.ATTR_LAST_RESET, diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index f0112097f4e..cedf120def1 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -126,7 +126,7 @@ "payload_not_available": "Payload not available" }, "data_description": { - "availability_topic": "Topic to receive the availabillity payload on", + "availability_topic": "Topic to receive the availability payload on", "availability_template": "A [template](https://www.home-assistant.io/docs/configuration/templating/#using-templates-with-the-mqtt-integration) to render the availability payload received on the availability topic", "payload_available": "The payload that indicates the device is available (defaults to 'online')", "payload_not_available": "The payload that indicates the device is not available (defaults to 'offline')" @@ -150,6 +150,17 @@ "hw_version": "The hardware version of the device. E.g. 'v1.0 rev a'.", "model": "E.g. 'Cleanmaster Pro'.", "model_id": "E.g. '123NK2PRO'." + }, + "sections": { + "mqtt_settings": { + "name": "MQTT Settings", + "data": { + "qos": "QoS" + }, + "data_description": { + "qos": "The Quality of Service value the device's entities should use." + } + } } }, "summary_menu": { @@ -198,20 +209,66 @@ "component": "Select the entity you want to update." } }, + "entity_platform_config": { + "title": "Configure MQTT device \"{mqtt_device}\"", + "description": "Please configure specific details for {platform} entity \"{entity}\":", + "data": { + "device_class": "Device class", + "state_class": "State class", + "unit_of_measurement": "Unit of measurement", + "options": "Add option" + }, + "data_description": { + "device_class": "The Device class of the {platform} entity. [Learn more.]({url}#device_class)", + "state_class": "The [State class](https://developers.home-assistant.io/docs/core/entity/sensor/#available-state-classes) of the sensor. [Learn more.]({url}#state_class)", + "unit_of_measurement": "Defines the unit of measurement of the sensor, if any.", + "options": "Options for allowed sensor state values. The sensor’s Device class must be set to Enumeration. The 'Options' setting cannot be used together with State class or Unit of measurement." + }, + "sections": { + "advanced_settings": { + "name": "Advanced options", + "data": { + "suggested_display_precision": "Suggested display precision" + }, + "data_description": { + "suggested_display_precision": "The number of decimals which should be used in the {platform} entity state after rounding. 
[Learn more.]({url}#suggested_display_precision)" + } + } + } + }, "mqtt_platform_config": { "title": "Configure MQTT device \"{mqtt_device}\"", "description": "Please configure MQTT specific details for {platform} entity \"{entity}\":", "data": { "command_topic": "Command topic", "command_template": "Command template", - "retain": "Retain", - "qos": "QoS" + "state_topic": "State topic", + "value_template": "Value template", + "last_reset_value_template": "Last reset value template", + "force_update": "Force update", + "optimistic": "Optimistic", + "retain": "Retain" }, "data_description": { - "command_topic": "The publishing topic that will be used to control the {platform} entity.", + "command_topic": "The publishing topic that will be used to control the {platform} entity. [Learn more.]({url}#command_topic)", "command_template": "A [template](https://www.home-assistant.io/docs/configuration/templating/#using-command-templates-with-mqtt) to render the payload to be published at the command topic.", - "retain": "Select if values published by the {platform} entity should be retained at the MQTT broker.", - "qos": "The QoS value {platform} entity should use." + "state_topic": "The MQTT topic subscribed to receive {platform} state values. [Learn more.]({url}#state_topic)", + "value_template": "Defines a [template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract the {platform} entity value.", + "last_reset_value_template": "Defines a [template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract the last reset. When Last reset template is set, the State class option must be Total. [Learn more.]({url}#last_reset_value_template)", + "force_update": "Sends update events even if the value hasn’t changed. Useful if you want to have meaningful value graphs in history. [Learn more.]({url}#force_update)", + "optimistic": "Flag that defines if the {platform} entity works in optimistic mode. [Learn more.]({url}#optimistic)", + "retain": "Select if values published by the {platform} entity should be retained at the MQTT broker." + }, + "sections": { + "advanced_settings": { + "name": "Advanced settings", + "data": { + "expire_after": "Expire after" + }, + "data_description": { + "expire_after": "If set, it defines the number of seconds after which the sensor’s state expires if it is not updated. After expiry, the sensor’s state becomes unavailable. If not set, the sensor’s state never expires. [Learn more.]({url}#expire_after)" + } + } } } }, @@ -225,7 +282,12 @@ "invalid_input": "Invalid value", "invalid_subscribe_topic": "Invalid subscribe topic", "invalid_template": "Invalid template", - "invalid_url": "Invalid URL" + "invalid_uom": "The unit of measurement \"{unit_of_measurement}\" is not supported by the selected device class. Please either remove the device class, select a device class which supports \"{unit_of_measurement}\", or pick a supported unit of measurement from the list", + "invalid_url": "Invalid URL", + "options_not_allowed_with_state_class_or_uom": "The 'Options' setting is not allowed when state class or unit of measurement is used", + "options_device_class_enum": "The 'Options' setting must be used with the Enumeration device class. 
If you continue, the existing options will be reset", + "options_with_enum_device_class": "Configure options for the enumeration sensor", + "uom_required_for_device_class": "The selected device class requires a unit" } } }, @@ -342,9 +404,77 @@ } }, "selector": { + "device_class_sensor": { + "options": { + "apparent_power": "[%key:component::sensor::entity_component::apparent_power::name%]", + "area": "[%key:component::sensor::entity_component::area::name%]", + "aqi": "[%key:component::sensor::entity_component::aqi::name%]", + "atmospheric_pressure": "[%key:component::sensor::entity_component::atmospheric_pressure::name%]", + "battery": "[%key:component::sensor::entity_component::battery::name%]", + "blood_glucose_concentration": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]", + "carbon_dioxide": "[%key:component::sensor::entity_component::carbon_dioxide::name%]", + "carbon_monoxide": "[%key:component::sensor::entity_component::carbon_monoxide::name%]", + "conductivity": "[%key:component::sensor::entity_component::conductivity::name%]", + "current": "[%key:component::sensor::entity_component::current::name%]", + "data_rate": "[%key:component::sensor::entity_component::data_rate::name%]", + "data_size": "[%key:component::sensor::entity_component::data_size::name%]", + "date": "[%key:component::sensor::entity_component::date::name%]", + "distance": "[%key:component::sensor::entity_component::distance::name%]", + "duration": "[%key:component::sensor::entity_component::duration::name%]", + "energy": "[%key:component::sensor::entity_component::energy::name%]", + "energy_distance": "[%key:component::sensor::entity_component::energy_distance::name%]", + "energy_storage": "[%key:component::sensor::entity_component::energy_storage::name%]", + "enum": "Enumeration", + "frequency": "[%key:component::sensor::entity_component::frequency::name%]", + "gas": "[%key:component::sensor::entity_component::gas::name%]", + "humidity": "[%key:component::sensor::entity_component::humidity::name%]", + "illuminance": "[%key:component::sensor::entity_component::illuminance::name%]", + "irradiance": "[%key:component::sensor::entity_component::irradiance::name%]", + "moisture": "[%key:component::sensor::entity_component::moisture::name%]", + "monetary": "[%key:component::sensor::entity_component::monetary::name%]", + "nitrogen_dioxide": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]", + "nitrogen_monoxide": "[%key:component::sensor::entity_component::nitrogen_monoxide::name%]", + "nitrous_oxide": "[%key:component::sensor::entity_component::nitrous_oxide::name%]", + "ozone": "[%key:component::sensor::entity_component::ozone::name%]", + "ph": "[%key:component::sensor::entity_component::ph::name%]", + "pm1": "[%key:component::sensor::entity_component::pm1::name%]", + "pm10": "[%key:component::sensor::entity_component::pm10::name%]", + "pm25": "[%key:component::sensor::entity_component::pm25::name%]", + "power": "[%key:component::sensor::entity_component::power::name%]", + "power_factor": "[%key:component::sensor::entity_component::power_factor::name%]", + "precipitation": "[%key:component::sensor::entity_component::precipitation::name%]", + "precipitation_intensity": "[%key:component::sensor::entity_component::precipitation_intensity::name%]", + "pressure": "[%key:component::sensor::entity_component::pressure::name%]", + "reactive_power": "[%key:component::sensor::entity_component::reactive_power::name%]", + "signal_strength": 
"[%key:component::sensor::entity_component::signal_strength::name%]", + "sound_pressure": "[%key:component::sensor::entity_component::sound_pressure::name%]", + "speed": "[%key:component::sensor::entity_component::speed::name%]", + "sulphur_dioxide": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]", + "temperature": "[%key:component::sensor::entity_component::temperature::name%]", + "timestamp": "[%key:component::sensor::entity_component::timestamp::name%]", + "volatile_organic_compounds": "[%key:component::sensor::entity_component::volatile_organic_compounds::name%]", + "volatile_organic_compounds_parts": "[%key:component::sensor::entity_component::volatile_organic_compounds_parts::name%]", + "voltage": "[%key:component::sensor::entity_component::voltage::name%]", + "volume": "[%key:component::sensor::entity_component::volume::name%]", + "volume_flow_rate": "[%key:component::sensor::entity_component::volume_flow_rate::name%]", + "volume_storage": "[%key:component::sensor::entity_component::volume_storage::name%]", + "water": "[%key:component::sensor::entity_component::water::name%]", + "weight": "[%key:component::sensor::entity_component::weight::name%]", + "wind_direction": "[%key:component::sensor::entity_component::wind_direction::name%]", + "wind_speed": "[%key:component::sensor::entity_component::wind_speed::name%]" + } + }, + "device_class_switch": { + "options": { + "outlet": "[%key:component::switch::entity_component::outlet::name%]", + "switch": "[%key:component::switch::title%]" + } + }, "platform": { "options": { - "notify": "Notify" + "notify": "Notify", + "sensor": "Sensor", + "switch": "Switch" } }, "set_ca_cert": { @@ -353,6 +483,13 @@ "auto": "Auto", "custom": "Custom" } + }, + "state_class": { + "options": { + "measurement": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::measurement%]", + "total": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::total%]", + "total_increasing": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::total_increasing%]" + } } }, "services": { diff --git a/homeassistant/components/mqtt/util.py b/homeassistant/components/mqtt/util.py index 27bdb4f2a35..e3996c80a8a 100644 --- a/homeassistant/components/mqtt/util.py +++ b/homeassistant/components/mqtt/util.py @@ -411,3 +411,9 @@ def migrate_certificate_file_to_content(file_name_or_auto: str) -> str | None: return certificate_file.read() except OSError: return None + + +@callback +def learn_more_url(platform: str) -> str: + """Return the URL for the platform specific MQTT documentation.""" + return f"https://www.home-assistant.io/integrations/{platform}.mqtt/" diff --git a/homeassistant/components/mullvad/config_flow.py b/homeassistant/components/mullvad/config_flow.py index c16f8879a7b..b179c5605ef 100644 --- a/homeassistant/components/mullvad/config_flow.py +++ b/homeassistant/components/mullvad/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Mullvad VPN integration.""" +import logging from typing import Any from mullvad_api import MullvadAPI, MullvadAPIError @@ -8,6 +9,8 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + class MullvadConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Mullvad VPN.""" @@ -24,7 +27,8 @@ class MullvadConfigFlow(ConfigFlow, domain=DOMAIN): await self.hass.async_add_executor_job(MullvadAPI) except MullvadAPIError: errors["base"] = 
"cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return self.async_create_entry(title="Mullvad VPN", data=user_input) diff --git a/homeassistant/components/music_assistant/manifest.json b/homeassistant/components/music_assistant/manifest.json index fb8bb9c3ac2..28e8587e90c 100644 --- a/homeassistant/components/music_assistant/manifest.json +++ b/homeassistant/components/music_assistant/manifest.json @@ -7,6 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/music_assistant", "iot_class": "local_push", "loggers": ["music_assistant"], - "requirements": ["music-assistant-client==1.1.1"], + "requirements": ["music-assistant-client==1.2.0"], "zeroconf": ["_mass._tcp.local."] } diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py index 56bde7bbae7..01a103f9bc4 100644 --- a/homeassistant/components/music_assistant/media_player.py +++ b/homeassistant/components/music_assistant/media_player.py @@ -94,6 +94,12 @@ SUPPORTED_FEATURES_BASE = ( | MediaPlayerEntityFeature.MEDIA_ENQUEUE | MediaPlayerEntityFeature.MEDIA_ANNOUNCE | MediaPlayerEntityFeature.SEEK + # we always add pause support, + # regardless if the underlying player actually natively supports pause + # because the MA behavior is to internally handle pause with stop + # (and a resume position) and we'd like to keep the UX consistent + # background info: https://github.com/home-assistant/core/issues/140118 + | MediaPlayerEntityFeature.PAUSE ) QUEUE_OPTION_MAP = { @@ -586,17 +592,24 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): def _update_media_image_url( self, player: Player, queue: PlayerQueue | None ) -> None: - """Update image URL for the active queue item.""" - if queue is None or queue.current_item is None: - self._attr_media_image_url = None - return - if image_url := self.mass.get_media_item_image_url(queue.current_item): + """Update image URL.""" + if queue and queue.current_item: + # image_url is provided by an music-assistant queue + image_url = self.mass.get_media_item_image_url(queue.current_item) + elif player.current_media and player.current_media.image_url: + # image_url is provided by an external source + image_url = player.current_media.image_url + else: + image_url = None + + # check if the image is provided via music-assistant and therefore + # not accessible from the outside + if image_url: self._attr_media_image_remotely_accessible = ( self.mass.server_url not in image_url ) - self._attr_media_image_url = image_url - return - self._attr_media_image_url = None + + self._attr_media_image_url = image_url def _update_media_attributes( self, player: Player, queue: PlayerQueue | None @@ -697,8 +710,6 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): supported_features = SUPPORTED_FEATURES_BASE if PlayerFeature.SET_MEMBERS in self.player.supported_features: supported_features |= MediaPlayerEntityFeature.GROUPING - if PlayerFeature.PAUSE in self.player.supported_features: - supported_features |= MediaPlayerEntityFeature.PAUSE if self.player.mute_control != PLAYER_CONTROL_NONE: supported_features |= MediaPlayerEntityFeature.VOLUME_MUTE if self.player.volume_control != PLAYER_CONTROL_NONE: diff --git a/homeassistant/components/mutesync/config_flow.py b/homeassistant/components/mutesync/config_flow.py index ef03df39968..a2aacfc927e 100644 --- a/homeassistant/components/mutesync/config_flow.py 
+++ b/homeassistant/components/mutesync/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +import logging from typing import Any import aiohttp @@ -16,6 +17,8 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required("host"): str}) @@ -60,7 +63,8 @@ class MuteSyncConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return self.async_create_entry( diff --git a/homeassistant/components/mysensors/sensor.py b/homeassistant/components/mysensors/sensor.py index 3a7101e6b39..3793bed8af2 100644 --- a/homeassistant/components/mysensors/sensor.py +++ b/homeassistant/components/mysensors/sensor.py @@ -102,6 +102,7 @@ SENSORS: dict[str, SensorEntityDescription] = { native_unit_of_measurement=DEGREE, icon="mdi:compass", device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), "V_WEIGHT": SensorEntityDescription( key="V_WEIGHT", diff --git a/homeassistant/components/mysensors/strings.json b/homeassistant/components/mysensors/strings.json index 30fe5f46d6b..1636cb076cc 100644 --- a/homeassistant/components/mysensors/strings.json +++ b/homeassistant/components/mysensors/strings.json @@ -21,16 +21,16 @@ "device": "IP address of the gateway", "tcp_port": "[%key:common::config_flow::data::port%]", "version": "MySensors version", - "persistence_file": "persistence file (leave empty to auto-generate)" + "persistence_file": "Persistence file (leave empty to auto-generate)" } }, "gw_serial": { "description": "Serial gateway setup", "data": { "device": "Serial port", - "baud_rate": "baud rate", + "baud_rate": "Baud rate", "version": "[%key:component::mysensors::config::step::gw_tcp::data::version%]", - "persistence_file": "Persistence file (leave empty to auto-generate)" + "persistence_file": "[%key:component::mysensors::config::step::gw_tcp::data::persistence_file%]" } }, "gw_mqtt": { @@ -40,7 +40,7 @@ "topic_in_prefix": "Prefix for input topics (topic_in_prefix)", "topic_out_prefix": "Prefix for output topics (topic_out_prefix)", "version": "[%key:component::mysensors::config::step::gw_tcp::data::version%]", - "persistence_file": "[%key:component::mysensors::config::step::gw_serial::data::persistence_file%]" + "persistence_file": "[%key:component::mysensors::config::step::gw_tcp::data::persistence_file%]" } } }, diff --git a/homeassistant/components/nasweb/config_flow.py b/homeassistant/components/nasweb/config_flow.py index 3a9ad3f7d49..298210903dc 100644 --- a/homeassistant/components/nasweb/config_flow.py +++ b/homeassistant/components/nasweb/config_flow.py @@ -103,7 +103,7 @@ class NASwebConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): errors["base"] = "missing_status" except AbortFlow: raise - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/neff/__init__.py b/homeassistant/components/neff/__init__.py new file mode 100644 index 00000000000..211ce088834 --- /dev/null +++ b/homeassistant/components/neff/__init__.py @@ -0,0 +1 @@ +"""Neff virtual integration.""" diff --git a/homeassistant/components/neff/manifest.json b/homeassistant/components/neff/manifest.json new 
file mode 100644 index 00000000000..1dfc91f94c9 --- /dev/null +++ b/homeassistant/components/neff/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "neff", + "name": "Neff", + "integration_type": "virtual", + "supported_by": "home_connect" +} diff --git a/homeassistant/components/nest/media_source.py b/homeassistant/components/nest/media_source.py index 146b6f2479e..a3d2901e911 100644 --- a/homeassistant/components/nest/media_source.py +++ b/homeassistant/components/nest/media_source.py @@ -20,8 +20,10 @@ from __future__ import annotations from collections.abc import Mapping from dataclasses import dataclass +import datetime import logging import os +import pathlib from typing import Any from google_nest_sdm.camera_traits import CameraClipPreviewTrait, CameraEventImageTrait @@ -46,6 +48,7 @@ from homeassistant.components.media_source import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.storage import Store from homeassistant.helpers.template import DATE_STR_FORMAT from homeassistant.util import dt as dt_util @@ -72,6 +75,9 @@ MEDIA_PATH = f"{DOMAIN}/event_media" # Size of small in-memory disk cache to avoid excessive disk reads DISK_READ_LRU_MAX_SIZE = 32 +# Remove orphaned media files that are older than this age +ORPHANED_MEDIA_AGE_CUTOFF = datetime.timedelta(days=7) + async def async_get_media_event_store( hass: HomeAssistant, subscriber: GoogleNestSubscriber @@ -123,6 +129,12 @@ class NestEventMediaStore(EventMediaStore): self._media_path = media_path self._data: dict[str, Any] | None = None self._devices: Mapping[str, str] | None = {} + # Invoke garbage collection for orphaned files once per day + async_track_time_interval( + hass, + self.async_remove_orphaned_media, + datetime.timedelta(days=1), + ) async def async_load(self) -> dict | None: """Load data.""" @@ -249,6 +261,68 @@ class NestEventMediaStore(EventMediaStore): devices[device.name] = device_entry.id return devices + async def async_remove_orphaned_media(self, now: datetime.datetime) -> None: + """Remove any media files that are orphaned and not referenced by the active event data. + + The event media store handles garbage collection, but there may be cases where files are + left around or unable to be removed. This is a scheduled event that will also check for + old orphaned files and remove them when the events are not referenced in the active list + of event data. + + Event media files are stored with the format <timestamp>-<event type>.suffix. We extract + the list of valid timestamps from the event data and remove any files that are not in that list + or are older than the cutoff time. 
+ """ + _LOGGER.debug("Checking for orphaned media at %s", now) + + def _cleanup(event_timestamps: dict[str, set[int]]) -> None: + time_cutoff = (now - ORPHANED_MEDIA_AGE_CUTOFF).timestamp() + media_path = pathlib.Path(self._media_path) + for device_id, valid_timestamps in event_timestamps.items(): + media_files = list(media_path.glob(f"{device_id}/*")) + _LOGGER.debug("Found %d files (device=%s)", len(media_files), device_id) + for media_file in media_files: + if "-" not in media_file.name: + continue + try: + timestamp = int(media_file.name.split("-")[0]) + except ValueError: + continue + if timestamp in valid_timestamps or timestamp > time_cutoff: + continue + _LOGGER.debug("Removing orphaned media file: %s", media_file) + try: + os.remove(media_file) + except OSError as err: + _LOGGER.error( + "Unable to remove orphaned media file: %s %s", + media_file, + err, + ) + + # Nest device id mapped to home assistant device id + event_timestamps = await self._get_valid_event_timestamps() + await self._hass.async_add_executor_job(_cleanup, event_timestamps) + + async def _get_valid_event_timestamps(self) -> dict[str, set[int]]: + """Return a mapping of home assistant device id to valid timestamps.""" + device_map = await self._get_devices() + event_data = await self.async_load() or {} + valid_device_timestamps = {} + for nest_device_id, device_id in device_map.items(): + if (device_events := event_data.get(nest_device_id, {})) is None: + continue + valid_device_timestamps[device_id] = { + int( + datetime.datetime.fromisoformat( + camera_event["timestamp"] + ).timestamp() + ) + for events in device_events + for camera_event in events["events"].values() + } + return valid_device_timestamps + async def async_get_media_source(hass: HomeAssistant) -> MediaSource: """Set up Nest media source.""" diff --git a/homeassistant/components/netatmo/sensor.py b/homeassistant/components/netatmo/sensor.py index 5f8084d542c..56b8233912f 100644 --- a/homeassistant/components/netatmo/sensor.py +++ b/homeassistant/components/netatmo/sensor.py @@ -213,7 +213,8 @@ SENSOR_TYPES: tuple[NetatmoSensorEntityDescription, ...] = ( netatmo_name="wind_angle", entity_registry_enabled_default=False, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, ), NetatmoSensorEntityDescription( key="windstrength", @@ -235,7 +236,8 @@ SENSOR_TYPES: tuple[NetatmoSensorEntityDescription, ...] 
= ( netatmo_name="gust_angle", entity_registry_enabled_default=False, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, ), NetatmoSensorEntityDescription( key="guststrength", @@ -345,7 +347,8 @@ PUBLIC_WEATHER_STATION_TYPES: tuple[ key="windangle_value", entity_registry_enabled_default=False, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, value_fn=lambda area: area.get_latest_wind_angles(), ), NetatmoPublicWeatherSensorEntityDescription( @@ -360,7 +363,8 @@ PUBLIC_WEATHER_STATION_TYPES: tuple[ translation_key="gust_angle", entity_registry_enabled_default=False, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, value_fn=lambda area: area.get_latest_gust_angles(), ), NetatmoPublicWeatherSensorEntityDescription( diff --git a/homeassistant/components/nextdns/config_flow.py b/homeassistant/components/nextdns/config_flow.py index d3327c4c08b..d36064d8fb0 100644 --- a/homeassistant/components/nextdns/config_flow.py +++ b/homeassistant/components/nextdns/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from aiohttp.client_exceptions import ClientConnectorError @@ -19,6 +20,8 @@ from .const import CONF_PROFILE_ID, DOMAIN AUTH_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str}) +_LOGGER = logging.getLogger(__name__) + async def async_init_nextdns(hass: HomeAssistant, api_key: str) -> NextDns: """Check if credentials are valid.""" @@ -51,7 +54,8 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_api_key" except (ApiError, ClientConnectorError, RetryError, TimeoutError): errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return await self.async_step_profiles() @@ -111,7 +115,8 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_api_key" except (ApiError, ClientConnectorError, RetryError, TimeoutError): errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return self.async_update_reload_and_abort( diff --git a/homeassistant/components/niko_home_control/config_flow.py b/homeassistant/components/niko_home_control/config_flow.py index f37e5e9248a..76e71bc1690 100644 --- a/homeassistant/components/niko_home_control/config_flow.py +++ b/homeassistant/components/niko_home_control/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any from nhc.controller import NHCController @@ -12,6 +13,8 @@ from homeassistant.const import CONF_HOST from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + DATA_SCHEMA = vol.Schema( { vol.Required(CONF_HOST): str, @@ -25,7 +28,8 @@ async def test_connection(host: str) -> str | None: controller = NHCController(host, 8000) try: await controller.connect() - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") return "cannot_connect" return None diff --git a/homeassistant/components/nmbs/sensor.py 
b/homeassistant/components/nmbs/sensor.py index 822b0236dd0..3552ac3c26d 100644 --- a/homeassistant/components/nmbs/sensor.py +++ b/homeassistant/components/nmbs/sensor.py @@ -360,7 +360,7 @@ class NMBSSensor(SensorEntity): attrs[ATTR_LONGITUDE] = self.station_coordinates[1] if self.is_via_connection and not self._excl_vias: - via = self._attrs.vias.via[0] + via = self._attrs.vias[0] attrs["via"] = via.station attrs["via_arrival_platform"] = via.arrival.platform diff --git a/homeassistant/components/nobo_hub/strings.json b/homeassistant/components/nobo_hub/strings.json index 28be01862e9..1059934e896 100644 --- a/homeassistant/components/nobo_hub/strings.json +++ b/homeassistant/components/nobo_hub/strings.json @@ -44,7 +44,7 @@ "entity": { "select": { "global_override": { - "name": "global override", + "name": "Global override", "state": { "away": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::away%]", "comfort": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::comfort%]", @@ -53,7 +53,7 @@ } }, "week_profile": { - "name": "week profile" + "name": "Week profile" } } } diff --git a/homeassistant/components/nut/__init__.py b/homeassistant/components/nut/__init__.py index 5b188868819..dc260dffe96 100644 --- a/homeassistant/components/nut/__init__.py +++ b/homeassistant/components/nut/__init__.py @@ -79,9 +79,21 @@ async def async_setup_entry(hass: HomeAssistant, entry: NutConfigEntry) -> bool: try: return await data.async_update() except NUTLoginError as err: - raise ConfigEntryAuthFailed from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="device_authentication", + translation_placeholders={ + "err": str(err), + }, + ) from err except NUTError as err: - raise UpdateFailed(f"Error fetching UPS state: {err}") from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="data_fetch_error", + translation_placeholders={ + "err": str(err), + }, + ) from err coordinator = DataUpdateCoordinator( hass, @@ -328,7 +340,12 @@ class PyNUTData: await self._client.run_command(self._alias, command_name) except NUTError as err: raise HomeAssistantError( - f"Error running command {command_name}, {err}" + translation_domain=DOMAIN, + translation_key="nut_command_error", + translation_placeholders={ + "command_name": command_name, + "err": str(err), + }, ) from err async def async_list_commands(self) -> set[str] | None: diff --git a/homeassistant/components/nut/device_action.py b/homeassistant/components/nut/device_action.py index ffaa195deaf..86f7fe5a7e6 100644 --- a/homeassistant/components/nut/device_action.py +++ b/homeassistant/components/nut/device_action.py @@ -51,7 +51,11 @@ async def async_call_action_from_config( runtime_data = _get_runtime_data_from_device_id(hass, device_id) if not runtime_data: raise InvalidDeviceAutomationConfig( - f"Unable to find a NUT device with id {device_id}" + translation_domain=DOMAIN, + translation_key="device_invalid", + translation_placeholders={ + "device_id": device_id, + }, ) await runtime_data.data.async_run_command(command_name) diff --git a/homeassistant/components/nut/icons.json b/homeassistant/components/nut/icons.json index c98d80ef55d..a795368005c 100644 --- a/homeassistant/components/nut/icons.json +++ b/homeassistant/components/nut/icons.json @@ -42,11 +42,26 @@ "battery_packs_bad": { "default": "mdi:information-outline" }, + "battery_runtime": { + "default": "mdi:clock-outline" + }, + "battery_runtime_low": { + "default": 
"mdi:clock-alert-outline" + }, + "battery_runtime_restart": { + "default": "mdi:clock-start" + }, "battery_type": { "default": "mdi:information-outline" }, + "battery_voltage_high": { + "default": "mdi:battery-high" + }, + "battery_voltage_low": { + "default": "mdi:battery-low" + }, "input_bypass_phases": { - "default": "mdi:information-outline" + "default": "mdi:sine-wave" }, "input_current_status": { "default": "mdi:information-outline" @@ -55,13 +70,10 @@ "default": "mdi:information-outline" }, "input_load": { - "default": "mdi:gauge" + "default": "mdi:percent-box-outline" }, "input_phases": { - "default": "mdi:information-outline" - }, - "input_power": { - "default": "mdi:gauge" + "default": "mdi:sine-wave" }, "input_sensitivity": { "default": "mdi:information-outline" @@ -72,35 +84,23 @@ "input_voltage_status": { "default": "mdi:information-outline" }, - "outlet_number_current": { - "default": "mdi:gauge" - }, "outlet_number_current_status": { "default": "mdi:information-outline" }, "outlet_number_desc": { "default": "mdi:information-outline" }, - "outlet_number_power": { - "default": "mdi:gauge" - }, - "outlet_number_realpower": { - "default": "mdi:gauge" - }, - "outlet_voltage": { - "default": "mdi:gauge" - }, "output_l1_power_percent": { - "default": "mdi:gauge" + "default": "mdi:percent-circle-outline" }, "output_l2_power_percent": { - "default": "mdi:gauge" + "default": "mdi:percent-circle-outline" }, "output_l3_power_percent": { - "default": "mdi:gauge" + "default": "mdi:percent-circle-outline" }, "output_phases": { - "default": "mdi:information-outline" + "default": "mdi:sine-wave" }, "ups_alarm": { "default": "mdi:alarm" @@ -111,20 +111,29 @@ "ups_contacts": { "default": "mdi:information-outline" }, + "ups_delay_reboot": { + "default": "mdi:timelapse" + }, + "ups_delay_shutdown": { + "default": "mdi:timelapse" + }, + "ups_delay_start": { + "default": "mdi:timelapse" + }, "ups_display_language": { "default": "mdi:information-outline" }, "ups_efficiency": { - "default": "mdi:gauge" + "default": "mdi:percent-outline" }, "ups_id": { "default": "mdi:information-outline" }, "ups_load": { - "default": "mdi:gauge" + "default": "mdi:percent-box-outline" }, "ups_load_high": { - "default": "mdi:gauge" + "default": "mdi:percent-box-outline" }, "ups_shutdown": { "default": "mdi:information-outline" @@ -147,9 +156,21 @@ "ups_test_date": { "default": "mdi:calendar" }, + "ups_test_interval": { + "default": "mdi:timelapse" + }, "ups_test_result": { "default": "mdi:information-outline" }, + "ups_timer_reboot": { + "default": "mdi:timer-refresh-outline" + }, + "ups_timer_shutdown": { + "default": "mdi:timer-stop-outline" + }, + "ups_timer_start": { + "default": "mdi:timer-play-outline" + }, "ups_type": { "default": "mdi:information-outline" }, diff --git a/homeassistant/components/nut/sensor.py b/homeassistant/components/nut/sensor.py index 5c01314dedf..1781615b0f9 100644 --- a/homeassistant/components/nut/sensor.py +++ b/homeassistant/components/nut/sensor.py @@ -523,6 +523,7 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { "input.power": SensorEntityDescription( key="input.power", translation_key="input_power", + native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, device_class=SensorDeviceClass.APPARENT_POWER, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, @@ -988,13 +989,6 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "watts": 
SensorEntityDescription( - key="watts", - translation_key="watts", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - ), } @@ -1103,6 +1097,6 @@ class NUTSensor(NUTBaseEntity, SensorEntity): def _format_display_state(status: dict[str, str]) -> str: """Return UPS display state.""" try: - return " ".join(STATE_TYPES[state] for state in status[KEY_STATUS].split()) + return ", ".join(STATE_TYPES[state] for state in status[KEY_STATUS].split()) except KeyError: return STATE_UNKNOWN diff --git a/homeassistant/components/nut/strings.json b/homeassistant/components/nut/strings.json index 1a54dffef11..56952778753 100644 --- a/homeassistant/components/nut/strings.json +++ b/homeassistant/components/nut/strings.json @@ -83,9 +83,27 @@ }, "sensor": { "ambient_humidity": { "name": "Ambient humidity" }, - "ambient_humidity_status": { "name": "Ambient humidity status" }, + "ambient_humidity_status": { + "name": "Ambient humidity status", + "state": { + "good": "Good", + "warning-low": "Warning low", + "critical-low": "Critical low", + "warning-high": "Warning high", + "critical-high": "Critical high" + } + }, "ambient_temperature": { "name": "Ambient temperature" }, - "ambient_temperature_status": { "name": "Ambient temperature status" }, + "ambient_temperature_status": { + "name": "Ambient temperature status", + "state": { + "good": "[%key:component::nut::entity::sensor::ambient_humidity_status::state::good%]", + "warning-low": "[%key:component::nut::entity::sensor::ambient_humidity_status::state::warning-low%]", + "critical-low": "[%key:component::nut::entity::sensor::ambient_humidity_status::state::critical-low%]", + "warning-high": "[%key:component::nut::entity::sensor::ambient_humidity_status::state::warning-high%]", + "critical-high": "[%key:component::nut::entity::sensor::ambient_humidity_status::state::critical-high%]" + } + }, "battery_alarm_threshold": { "name": "Battery alarm threshold" }, "battery_capacity": { "name": "Battery capacity" }, "battery_charge": { "name": "Battery charge" }, @@ -212,11 +230,24 @@ "ups_timer_shutdown": { "name": "Load shutdown timer" }, "ups_timer_start": { "name": "Load start timer" }, "ups_type": { "name": "UPS type" }, - "ups_watchdog_status": { "name": "Watchdog status" }, - "watts": { "name": "Watts" } + "ups_watchdog_status": { "name": "Watchdog status" } }, "switch": { "outlet_number_load_poweronoff": { "name": "Power outlet {outlet_name}" } } + }, + "exceptions": { + "data_fetch_error": { + "message": "Error fetching UPS state: {err}" + }, + "device_authentication": { + "message": "Device authentication error: {err}" + }, + "device_invalid": { + "message": "Unable to find a NUT device with ID {device_id}" + }, + "nut_command_error": { + "message": "Error running command {command_name}, {err}" + } } } diff --git a/homeassistant/components/nws/sensor.py b/homeassistant/components/nws/sensor.py index 4cfb3b85e0f..8a7631d8381 100644 --- a/homeassistant/components/nws/sensor.py +++ b/homeassistant/components/nws/sensor.py @@ -115,6 +115,7 @@ SENSOR_TYPES: tuple[NWSSensorEntityDescription, ...] 
= ( native_unit_of_measurement=DEGREE, unit_convert=DEGREE, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), NWSSensorEntityDescription( key="barometricPressure", diff --git a/homeassistant/components/octoprint/config_flow.py b/homeassistant/components/octoprint/config_flow.py index 010b45e5a1c..e20eea0a61f 100644 --- a/homeassistant/components/octoprint/config_flow.py +++ b/homeassistant/components/octoprint/config_flow.py @@ -85,7 +85,8 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): raise err from None except CannotConnect: errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" if errors: diff --git a/homeassistant/components/ohme/quality_scale.yaml b/homeassistant/components/ohme/quality_scale.yaml index f748cf339b4..12473a08edd 100644 --- a/homeassistant/components/ohme/quality_scale.yaml +++ b/homeassistant/components/ohme/quality_scale.yaml @@ -48,17 +48,20 @@ rules: status: exempt comment: | All supported devices are cloud connected over mobile data. Discovery is not possible. - docs-data-update: todo - docs-examples: todo - docs-known-limitations: todo + docs-data-update: done + docs-examples: done + docs-known-limitations: done docs-supported-devices: done - docs-supported-functions: todo - docs-troubleshooting: todo - docs-use-cases: todo - dynamic-devices: todo - entity-category: todo + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: + status: exempt + comment: | + Not supported by the API. Accounts and devices have a one-to-one relationship. + entity-category: done entity-device-class: done - entity-disabled-by-default: todo + entity-disabled-by-default: done entity-translations: done exception-translations: done icon-translations: done @@ -67,7 +70,10 @@ rules: status: exempt comment: | This integration currently has no repairs. - stale-devices: todo + stale-devices: + status: exempt + comment: | + Not supported by the API. Accounts and devices have a one-to-one relationship. 
# Platinum async-dependency: todo inject-websession: todo diff --git a/homeassistant/components/ohme/sensor.py b/homeassistant/components/ohme/sensor.py index d0425040b53..6b9e1e9c5a7 100644 --- a/homeassistant/components/ohme/sensor.py +++ b/homeassistant/components/ohme/sensor.py @@ -99,6 +99,7 @@ SENSOR_ADVANCED_SETTINGS = [ native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, value_fn=lambda client: client.power.ct_amps, is_supported_fn=lambda client: client.ct_connected, + entity_registry_enabled_default=False, ), ] diff --git a/homeassistant/components/onkyo/config_flow.py b/homeassistant/components/onkyo/config_flow.py index 5d941be959a..85ff0de3251 100644 --- a/homeassistant/components/onkyo/config_flow.py +++ b/homeassistant/components/onkyo/config_flow.py @@ -14,7 +14,7 @@ from homeassistant.config_entries import ( ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.const import CONF_HOST from homeassistant.core import callback from homeassistant.data_entry_flow import section from homeassistant.helpers.selector import ( @@ -30,8 +30,6 @@ from homeassistant.helpers.selector import ( from homeassistant.helpers.service_info.ssdp import SsdpServiceInfo from .const import ( - CONF_RECEIVER_MAX_VOLUME, - CONF_SOURCES, DOMAIN, OPTION_INPUT_SOURCES, OPTION_LISTENING_MODES, @@ -329,61 +327,6 @@ class OnkyoConfigFlow(ConfigFlow, domain=DOMAIN): """Handle reconfiguration of the receiver.""" return await self.async_step_manual() - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Import the yaml config.""" - _LOGGER.debug("Import flow user input: %s", user_input) - - host: str = user_input[CONF_HOST] - name: str | None = user_input.get(CONF_NAME) - user_max_volume: int = user_input[OPTION_MAX_VOLUME] - user_volume_resolution: int = user_input[CONF_RECEIVER_MAX_VOLUME] - user_sources: dict[InputSource, str] = user_input[CONF_SOURCES] - - info: ReceiverInfo | None = user_input.get("info") - if info is None: - try: - info = await async_interview(host) - except Exception: - _LOGGER.exception("Import flow interview error for host %s", host) - return self.async_abort(reason="cannot_connect") - - if info is None: - _LOGGER.error("Import flow interview error for host %s", host) - return self.async_abort(reason="cannot_connect") - - unique_id = info.identifier - await self.async_set_unique_id(unique_id) - self._abort_if_unique_id_configured() - - name = name or info.model_name - - volume_resolution = VOLUME_RESOLUTION_ALLOWED[-1] - for volume_resolution_allowed in VOLUME_RESOLUTION_ALLOWED: - if user_volume_resolution <= volume_resolution_allowed: - volume_resolution = volume_resolution_allowed - break - - max_volume = min( - 100, user_max_volume * user_volume_resolution / volume_resolution - ) - - sources_store: dict[str, str] = {} - for source, source_name in user_sources.items(): - sources_store[source.value] = source_name - - return self.async_create_entry( - title=name, - data={ - CONF_HOST: host, - }, - options={ - OPTION_VOLUME_RESOLUTION: volume_resolution, - OPTION_MAX_VOLUME: max_volume, - OPTION_INPUT_SOURCES: sources_store, - OPTION_LISTENING_MODES: LISTENING_MODES_DEFAULT, - }, - ) - @staticmethod @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: diff --git a/homeassistant/components/onkyo/const.py b/homeassistant/components/onkyo/const.py index fcb1a8a0a9e..851d80c5100 100644 --- a/homeassistant/components/onkyo/const.py +++ 
b/homeassistant/components/onkyo/const.py @@ -11,9 +11,6 @@ DOMAIN = "onkyo" DEVICE_INTERVIEW_TIMEOUT = 5 DEVICE_DISCOVERY_TIMEOUT = 5 -CONF_SOURCES = "sources" -CONF_RECEIVER_MAX_VOLUME = "receiver_max_volume" - type VolumeResolution = Literal[50, 80, 100, 200] OPTION_VOLUME_RESOLUTION = "volume_resolution" OPTION_VOLUME_RESOLUTION_DEFAULT: VolumeResolution = 50 diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index f7fe83c57a3..aed7c51af80 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -8,32 +8,18 @@ from functools import cache import logging from typing import Any, Literal -import voluptuous as vol - from homeassistant.components.media_player import ( - PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, MediaPlayerEntity, MediaPlayerEntityFeature, MediaPlayerState, MediaType, ) -from homeassistant.config_entries import SOURCE_IMPORT -from homeassistant.const import CONF_HOST, CONF_NAME -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback -from homeassistant.data_entry_flow import FlowResultType +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import config_validation as cv, entity_registry as er -from homeassistant.helpers.entity_platform import ( - AddConfigEntryEntitiesCallback, - AddEntitiesCallback, -) -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . import OnkyoConfigEntry from .const import ( - CONF_RECEIVER_MAX_VOLUME, - CONF_SOURCES, DOMAIN, OPTION_MAX_VOLUME, OPTION_VOLUME_RESOLUTION, @@ -43,46 +29,11 @@ from .const import ( ListeningMode, VolumeResolution, ) -from .receiver import Receiver, async_discover +from .receiver import Receiver from .services import DATA_MP_ENTITIES _LOGGER = logging.getLogger(__name__) -CONF_MAX_VOLUME_DEFAULT = 100 -CONF_RECEIVER_MAX_VOLUME_DEFAULT = 80 -CONF_SOURCES_DEFAULT = { - "tv": "TV", - "bd": "Bluray", - "game": "Game", - "aux1": "Aux1", - "video1": "Video 1", - "video2": "Video 2", - "video3": "Video 3", - "video4": "Video 4", - "video5": "Video 5", - "video6": "Video 6", - "video7": "Video 7", - "fm": "Radio", -} - -ISSUE_URL_PLACEHOLDER = "/config/integrations/dashboard/add?domain=onkyo" - -PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_HOST): cv.string, - vol.Optional(CONF_NAME): cv.string, - vol.Optional(OPTION_MAX_VOLUME, default=CONF_MAX_VOLUME_DEFAULT): vol.All( - vol.Coerce(int), vol.Range(min=1, max=100) - ), - vol.Optional( - CONF_RECEIVER_MAX_VOLUME, default=CONF_RECEIVER_MAX_VOLUME_DEFAULT - ): cv.positive_int, - vol.Optional(CONF_SOURCES, default=CONF_SOURCES_DEFAULT): { - cv.string: cv.string - }, - } -) - SUPPORTED_FEATURES_BASE = ( MediaPlayerEntityFeature.TURN_ON @@ -194,122 +145,6 @@ def _rev_listening_mode_lib_mappings(zone: str) -> dict[LibValue, ListeningMode] return {value: key for key, value in _listening_mode_lib_mappings(zone).items()} -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Import config from yaml.""" - host = config.get(CONF_HOST) - - source_mapping: dict[str, InputSource] = {} - for zone in 
ZONES: - for source, source_lib in _input_source_lib_mappings(zone).items(): - if isinstance(source_lib, str): - source_mapping.setdefault(source_lib, source) - else: - for source_lib_single in source_lib: - source_mapping.setdefault(source_lib_single, source) - - sources: dict[InputSource, str] = {} - for source_lib_single, source_name in config[CONF_SOURCES].items(): - user_source = source_mapping.get(source_lib_single.lower()) - if user_source is not None: - sources[user_source] = source_name - - config[CONF_SOURCES] = sources - - results = [] - if host is not None: - _LOGGER.debug("Importing yaml single: %s", host) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=config - ) - results.append((host, result)) - else: - for info in await async_discover(): - host = info.host - - # Migrate legacy entities. - registry = er.async_get(hass) - old_unique_id = f"{info.model_name}_{info.identifier}" - new_unique_id = f"{info.identifier}_main" - entity_id = registry.async_get_entity_id( - "media_player", DOMAIN, old_unique_id - ) - if entity_id is not None: - _LOGGER.debug( - "Migrating unique_id from [%s] to [%s] for entity %s", - old_unique_id, - new_unique_id, - entity_id, - ) - registry.async_update_entity(entity_id, new_unique_id=new_unique_id) - - _LOGGER.debug("Importing yaml discover: %s", info.host) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=config | {CONF_HOST: info.host} | {"info": info}, - ) - results.append((host, result)) - - _LOGGER.debug("Importing yaml results: %s", results) - if not results: - async_create_issue( - hass, - DOMAIN, - "deprecated_yaml_import_issue_no_discover", - breaks_in_ha_version="2025.5.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml_import_issue_no_discover", - translation_placeholders={"url": ISSUE_URL_PLACEHOLDER}, - ) - - all_successful = True - for host, result in results: - if ( - result.get("type") == FlowResultType.CREATE_ENTRY - or result.get("reason") == "already_configured" - ): - continue - if error := result.get("reason"): - all_successful = False - async_create_issue( - hass, - DOMAIN, - f"deprecated_yaml_import_issue_{host}_{error}", - breaks_in_ha_version="2025.5.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{error}", - translation_placeholders={ - "host": host, - "url": ISSUE_URL_PLACEHOLDER, - }, - ) - - if all_successful: - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - is_fixable=False, - issue_domain=DOMAIN, - breaks_in_ha_version="2025.5.0", - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "onkyo", - }, - ) - - async def async_setup_entry( hass: HomeAssistant, entry: OnkyoConfigEntry, diff --git a/homeassistant/components/onkyo/strings.json b/homeassistant/components/onkyo/strings.json index d8131dd1149..3e5520c79f7 100644 --- a/homeassistant/components/onkyo/strings.json +++ b/homeassistant/components/onkyo/strings.json @@ -83,16 +83,6 @@ "empty_listening_mode_list": "Listening mode list cannot be empty" } }, - "issues": { - "deprecated_yaml_import_issue_no_discover": { - "title": "The Onkyo YAML configuration import failed", - "description": "Configuring Onkyo using YAML is being removed but no receivers were discovered when importing your 
YAML configuration.\n\nEnsure the connection to the receiver works and restart Home Assistant to try again or remove the Onkyo YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "deprecated_yaml_import_issue_cannot_connect": { - "title": "The Onkyo YAML configuration import failed", - "description": "Configuring Onkyo using YAML is being removed but there was a connection error when importing your YAML configuration for host {host}.\n\nEnsure the connection to the receiver works and restart Home Assistant to try again or remove the Onkyo YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - } - }, "exceptions": { "invalid_sound_mode": { "message": "Cannot select sound mode \"{invalid_sound_mode}\" for entity: {entity_id}." diff --git a/homeassistant/components/openai_conversation/__init__.py b/homeassistant/components/openai_conversation/__init__.py index fcf6ab298dc..276f5ddea3b 100644 --- a/homeassistant/components/openai_conversation/__init__.py +++ b/homeassistant/components/openai_conversation/__init__.py @@ -11,6 +11,7 @@ from openai.types.images_response import ImagesResponse from openai.types.responses import ( EasyInputMessageParam, Response, + ResponseInputFileParam, ResponseInputImageParam, ResponseInputMessageContentListParam, ResponseInputParam, @@ -132,19 +133,28 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: if not Path(filename).exists(): raise HomeAssistantError(f"`{filename}` does not exist") mime_type, base64_file = encode_file(filename) - if "image/" not in mime_type: + if "image/" in mime_type: + content.append( + ResponseInputImageParam( + type="input_image", + file_id=filename, + image_url=f"data:{mime_type};base64,{base64_file}", + detail="auto", + ) + ) + elif "application/pdf" in mime_type: + content.append( + ResponseInputFileParam( + type="input_file", + filename=filename, + file_data=f"data:{mime_type};base64,{base64_file}", + ) + ) + else: raise HomeAssistantError( - "Only images are supported by the OpenAI API," - f"`{filename}` is not an image file" + "Only images and PDF are supported by the OpenAI API," + f"`{filename}` is not an image file or PDF" ) - content.append( - ResponseInputImageParam( - type="input_image", - file_id=filename, - image_url=f"data:{mime_type};base64,{base64_file}", - detail="auto", - ) - ) if CONF_FILENAMES in call.data: await hass.async_add_executor_job(append_files_to_content) diff --git a/homeassistant/components/openai_conversation/config_flow.py b/homeassistant/components/openai_conversation/config_flow.py index c631884ea0b..7304eb52da3 100644 --- a/homeassistant/components/openai_conversation/config_flow.py +++ b/homeassistant/components/openai_conversation/config_flow.py @@ -2,22 +2,31 @@ from __future__ import annotations +import json import logging from types import MappingProxyType from typing import Any import openai import voluptuous as vol +from voluptuous_openapi import convert +from homeassistant.components.zone import ENTITY_ID_HOME from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API +from homeassistant.const import ( + ATTR_LATITUDE, + ATTR_LONGITUDE, + CONF_API_KEY, + CONF_LLM_HASS_API, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import llm +from homeassistant.helpers.httpx_client import get_async_client from 
homeassistant.helpers.selector import ( NumberSelector, NumberSelectorConfig, @@ -37,12 +46,22 @@ from .const import ( CONF_RECOMMENDED, CONF_TEMPERATURE, CONF_TOP_P, + CONF_WEB_SEARCH, + CONF_WEB_SEARCH_CITY, + CONF_WEB_SEARCH_CONTEXT_SIZE, + CONF_WEB_SEARCH_COUNTRY, + CONF_WEB_SEARCH_REGION, + CONF_WEB_SEARCH_TIMEZONE, + CONF_WEB_SEARCH_USER_LOCATION, DOMAIN, RECOMMENDED_CHAT_MODEL, RECOMMENDED_MAX_TOKENS, RECOMMENDED_REASONING_EFFORT, RECOMMENDED_TEMPERATURE, RECOMMENDED_TOP_P, + RECOMMENDED_WEB_SEARCH, + RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE, + RECOMMENDED_WEB_SEARCH_USER_LOCATION, UNSUPPORTED_MODELS, ) @@ -66,7 +85,9 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None: Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. """ - client = openai.AsyncOpenAI(api_key=data[CONF_API_KEY]) + client = openai.AsyncOpenAI( + api_key=data[CONF_API_KEY], http_client=get_async_client(hass) + ) await hass.async_add_executor_job(client.with_options(timeout=10.0).models.list) @@ -137,7 +158,16 @@ class OpenAIOptionsFlow(OptionsFlow): if user_input.get(CONF_CHAT_MODEL) in UNSUPPORTED_MODELS: errors[CONF_CHAT_MODEL] = "model_not_supported" - else: + + if user_input.get(CONF_WEB_SEARCH): + if not user_input.get( + CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL + ).startswith("gpt-4o"): + errors[CONF_WEB_SEARCH] = "web_search_not_supported" + elif user_input.get(CONF_WEB_SEARCH_USER_LOCATION): + user_input.update(await self.get_location_data()) + + if not errors: return self.async_create_entry(title="", data=user_input) else: # Re-render the options again, now with the recommended options shown/hidden @@ -156,6 +186,59 @@ class OpenAIOptionsFlow(OptionsFlow): errors=errors, ) + async def get_location_data(self) -> dict[str, str]: + """Get approximate location data of the user.""" + location_data: dict[str, str] = {} + zone_home = self.hass.states.get(ENTITY_ID_HOME) + if zone_home is not None: + client = openai.AsyncOpenAI( + api_key=self.config_entry.data[CONF_API_KEY], + http_client=get_async_client(self.hass), + ) + location_schema = vol.Schema( + { + vol.Optional( + CONF_WEB_SEARCH_CITY, + description="Free text input for the city, e.g. `San Francisco`", + ): str, + vol.Optional( + CONF_WEB_SEARCH_REGION, + description="Free text input for the region, e.g. 
`California`", + ): str, + } + ) + response = await client.responses.create( + model=RECOMMENDED_CHAT_MODEL, + input=[ + { + "role": "system", + "content": "Where are the following coordinates located: " + f"({zone_home.attributes[ATTR_LATITUDE]}," + f" {zone_home.attributes[ATTR_LONGITUDE]})?", + } + ], + text={ + "format": { + "type": "json_schema", + "name": "approximate_location", + "description": "Approximate location data of the user " + "for refined web search results", + "schema": convert(location_schema), + "strict": False, + } + }, + store=False, + ) + location_data = location_schema(json.loads(response.output_text) or {}) + + if self.hass.config.country: + location_data[CONF_WEB_SEARCH_COUNTRY] = self.hass.config.country + location_data[CONF_WEB_SEARCH_TIMEZONE] = self.hass.config.time_zone + + _LOGGER.debug("Location data: %s", location_data) + + return location_data + def openai_config_option_schema( hass: HomeAssistant, @@ -227,10 +310,35 @@ def openai_config_option_schema( ): SelectSelector( SelectSelectorConfig( options=["low", "medium", "high"], - translation_key="reasoning_effort", + translation_key=CONF_REASONING_EFFORT, mode=SelectSelectorMode.DROPDOWN, ) ), + vol.Optional( + CONF_WEB_SEARCH, + description={"suggested_value": options.get(CONF_WEB_SEARCH)}, + default=RECOMMENDED_WEB_SEARCH, + ): bool, + vol.Optional( + CONF_WEB_SEARCH_CONTEXT_SIZE, + description={ + "suggested_value": options.get(CONF_WEB_SEARCH_CONTEXT_SIZE) + }, + default=RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE, + ): SelectSelector( + SelectSelectorConfig( + options=["low", "medium", "high"], + translation_key=CONF_WEB_SEARCH_CONTEXT_SIZE, + mode=SelectSelectorMode.DROPDOWN, + ) + ), + vol.Optional( + CONF_WEB_SEARCH_USER_LOCATION, + description={ + "suggested_value": options.get(CONF_WEB_SEARCH_USER_LOCATION) + }, + default=RECOMMENDED_WEB_SEARCH_USER_LOCATION, + ): bool, } ) return schema diff --git a/homeassistant/components/openai_conversation/const.py b/homeassistant/components/openai_conversation/const.py index c9987cb81b9..41abc504219 100644 --- a/homeassistant/components/openai_conversation/const.py +++ b/homeassistant/components/openai_conversation/const.py @@ -14,11 +14,21 @@ CONF_REASONING_EFFORT = "reasoning_effort" CONF_RECOMMENDED = "recommended" CONF_TEMPERATURE = "temperature" CONF_TOP_P = "top_p" +CONF_WEB_SEARCH = "web_search" +CONF_WEB_SEARCH_USER_LOCATION = "user_location" +CONF_WEB_SEARCH_CONTEXT_SIZE = "search_context_size" +CONF_WEB_SEARCH_CITY = "city" +CONF_WEB_SEARCH_REGION = "region" +CONF_WEB_SEARCH_COUNTRY = "country" +CONF_WEB_SEARCH_TIMEZONE = "timezone" RECOMMENDED_CHAT_MODEL = "gpt-4o-mini" RECOMMENDED_MAX_TOKENS = 150 RECOMMENDED_REASONING_EFFORT = "low" RECOMMENDED_TEMPERATURE = 1.0 RECOMMENDED_TOP_P = 1.0 +RECOMMENDED_WEB_SEARCH = False +RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE = "medium" +RECOMMENDED_WEB_SEARCH_USER_LOCATION = False UNSUPPORTED_MODELS: list[str] = [ "o1-mini", diff --git a/homeassistant/components/openai_conversation/conversation.py b/homeassistant/components/openai_conversation/conversation.py index 32ac20b2680..026e18f3ce1 100644 --- a/homeassistant/components/openai_conversation/conversation.py +++ b/homeassistant/components/openai_conversation/conversation.py @@ -10,18 +10,23 @@ from openai.types.responses import ( EasyInputMessageParam, FunctionToolParam, ResponseCompletedEvent, + ResponseErrorEvent, + ResponseFailedEvent, ResponseFunctionCallArgumentsDeltaEvent, ResponseFunctionCallArgumentsDoneEvent, ResponseFunctionToolCall, 
ResponseFunctionToolCallParam, + ResponseIncompleteEvent, ResponseInputParam, ResponseOutputItemAddedEvent, ResponseOutputMessage, ResponseStreamEvent, ResponseTextDeltaEvent, ToolParam, + WebSearchToolParam, ) from openai.types.responses.response_input_param import FunctionCallOutput +from openai.types.responses.web_search_tool_param import UserLocation from voluptuous_openapi import convert from homeassistant.components import assist_pipeline, conversation @@ -40,6 +45,13 @@ from .const import ( CONF_REASONING_EFFORT, CONF_TEMPERATURE, CONF_TOP_P, + CONF_WEB_SEARCH, + CONF_WEB_SEARCH_CITY, + CONF_WEB_SEARCH_CONTEXT_SIZE, + CONF_WEB_SEARCH_COUNTRY, + CONF_WEB_SEARCH_REGION, + CONF_WEB_SEARCH_TIMEZONE, + CONF_WEB_SEARCH_USER_LOCATION, DOMAIN, LOGGER, RECOMMENDED_CHAT_MODEL, @@ -47,6 +59,7 @@ from .const import ( RECOMMENDED_REASONING_EFFORT, RECOMMENDED_TEMPERATURE, RECOMMENDED_TOP_P, + RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE, ) # Max number of back and forth with the LLM to generate a response @@ -139,18 +152,57 @@ async def _transform_stream( ) ] } - elif ( - isinstance(event, ResponseCompletedEvent) - and (usage := event.response.usage) is not None - ): - chat_log.async_trace( - { - "stats": { - "input_tokens": usage.input_tokens, - "output_tokens": usage.output_tokens, + elif isinstance(event, ResponseCompletedEvent): + if event.response.usage is not None: + chat_log.async_trace( + { + "stats": { + "input_tokens": event.response.usage.input_tokens, + "output_tokens": event.response.usage.output_tokens, + } } - } - ) + ) + elif isinstance(event, ResponseIncompleteEvent): + if event.response.usage is not None: + chat_log.async_trace( + { + "stats": { + "input_tokens": event.response.usage.input_tokens, + "output_tokens": event.response.usage.output_tokens, + } + } + ) + + if ( + event.response.incomplete_details + and event.response.incomplete_details.reason + ): + reason: str = event.response.incomplete_details.reason + else: + reason = "unknown reason" + + if reason == "max_output_tokens": + reason = "max output tokens reached" + elif reason == "content_filter": + reason = "content filter triggered" + + raise HomeAssistantError(f"OpenAI response incomplete: {reason}") + elif isinstance(event, ResponseFailedEvent): + if event.response.usage is not None: + chat_log.async_trace( + { + "stats": { + "input_tokens": event.response.usage.input_tokens, + "output_tokens": event.response.usage.output_tokens, + } + } + ) + reason = "unknown reason" + if event.response.error is not None: + reason = event.response.error.message + raise HomeAssistantError(f"OpenAI response failed: {reason}") + elif isinstance(event, ResponseErrorEvent): + raise HomeAssistantError(f"OpenAI response error: {event.message}") class OpenAIConversationEntity( @@ -223,6 +275,25 @@ class OpenAIConversationEntity( for tool in chat_log.llm_api.tools ] + if options.get(CONF_WEB_SEARCH): + web_search = WebSearchToolParam( + type="web_search_preview", + search_context_size=options.get( + CONF_WEB_SEARCH_CONTEXT_SIZE, RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE + ), + ) + if options.get(CONF_WEB_SEARCH_USER_LOCATION): + web_search["user_location"] = UserLocation( + type="approximate", + city=options.get(CONF_WEB_SEARCH_CITY, ""), + region=options.get(CONF_WEB_SEARCH_REGION, ""), + country=options.get(CONF_WEB_SEARCH_COUNTRY, ""), + timezone=options.get(CONF_WEB_SEARCH_TIMEZONE, ""), + ) + if tools is None: + tools = [] + tools.append(web_search) + model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) messages = [ m diff --git 
a/homeassistant/components/openai_conversation/strings.json b/homeassistant/components/openai_conversation/strings.json index c9d7ee112bd..91c1c475bd6 100644 --- a/homeassistant/components/openai_conversation/strings.json +++ b/homeassistant/components/openai_conversation/strings.json @@ -24,16 +24,23 @@ "top_p": "Top P", "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]", "recommended": "Recommended model settings", - "reasoning_effort": "Reasoning effort" + "reasoning_effort": "Reasoning effort", + "web_search": "Enable web search", + "search_context_size": "Search context size", + "user_location": "Include home location" }, "data_description": { "prompt": "Instruct how the LLM should respond. This can be a template.", - "reasoning_effort": "How many reasoning tokens the model should generate before creating a response to the prompt (for certain reasoning models)" + "reasoning_effort": "How many reasoning tokens the model should generate before creating a response to the prompt (for certain reasoning models)", + "web_search": "Allow the model to search the web for the latest information before generating a response", + "search_context_size": "High level guidance for the amount of context window space to use for the search", + "user_location": "Refine search results based on geography" } } }, "error": { - "model_not_supported": "This model is not supported, please select a different model" + "model_not_supported": "This model is not supported, please select a different model", + "web_search_not_supported": "Web search is only supported for gpt-4o and gpt-4o-mini models" } }, "selector": { @@ -43,6 +50,13 @@ "medium": "Medium", "high": "High" } + }, + "search_context_size": { + "options": { + "low": "Low", + "medium": "Medium", + "high": "High" + } } }, "services": { @@ -75,7 +89,7 @@ }, "generate_content": { "name": "Generate content", - "description": "Sends a conversational query to ChatGPT including any attached image files", + "description": "Sends a conversational query to ChatGPT including any attached image or PDF files", "fields": { "config_entry": { "name": "Config entry", diff --git a/homeassistant/components/openweathermap/sensor.py b/homeassistant/components/openweathermap/sensor.py index 0afab69b638..a595652d90b 100644 --- a/homeassistant/components/openweathermap/sensor.py +++ b/homeassistant/components/openweathermap/sensor.py @@ -89,7 +89,8 @@ WEATHER_SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( key=ATTR_API_WIND_BEARING, name="Wind bearing", native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, ), SensorEntityDescription( key=ATTR_API_HUMIDITY, diff --git a/homeassistant/components/opower/coordinator.py b/homeassistant/components/opower/coordinator.py index aed89ccf46e..e8b6dbf9718 100644 --- a/homeassistant/components/opower/coordinator.py +++ b/homeassistant/components/opower/coordinator.py @@ -16,7 +16,11 @@ from opower import ( from opower.exceptions import ApiException, CannotConnect, InvalidAuth from homeassistant.components.recorder import get_instance -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -201,7 +205,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): f"{account.meter_type.name.lower()} {account.utility_account_id}" ) cost_metadata = StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{name_prefix} cost", source=DOMAIN, @@ -209,7 +213,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): unit_of_measurement=None, ) consumption_metadata = StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{name_prefix} consumption", source=DOMAIN, diff --git a/homeassistant/components/opower/strings.json b/homeassistant/components/opower/strings.json index 362e6cd7596..749545743fe 100644 --- a/homeassistant/components/opower/strings.json +++ b/homeassistant/components/opower/strings.json @@ -11,7 +11,7 @@ "mfa": { "description": "The TOTP secret below is not one of the 6 digit time-based numeric codes. It is a string of around 16 characters containing the shared secret that enables your authenticator app to generate the correct time-based code at the appropriate time. 
See the documentation.", "data": { - "totp_secret": "TOTP Secret" + "totp_secret": "TOTP secret" } }, "reauth_confirm": { @@ -19,7 +19,7 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "totp_secret": "TOTP Secret" + "totp_secret": "[%key:component::opower::config::step::mfa::data::totp_secret%]" } } }, diff --git a/homeassistant/components/osoenergy/strings.json b/homeassistant/components/osoenergy/strings.json index 7e10168d941..ef7e2abb89b 100644 --- a/homeassistant/components/osoenergy/strings.json +++ b/homeassistant/components/osoenergy/strings.json @@ -60,7 +60,7 @@ "ffr": "Fast frequency reserve", "legionella": "Legionella", "manual": "Manual", - "off": "Off", + "off": "[%key:common::state::off%]", "powersave": "Power save", "voltage": "Voltage" } @@ -70,7 +70,7 @@ "state": { "advanced": "Advanced", "gridcompany": "Grid company", - "off": "Off", + "off": "[%key:common::state::off%]", "oso": "OSO", "smartcompany": "Smart company" } diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index cfaed4ceb8b..937b4ccb937 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -13,7 +13,7 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"], - "requirements": ["pyoverkiz==1.16.4"], + "requirements": ["pyoverkiz==1.16.5"], "zeroconf": [ { "type": "_kizbox._tcp.local.", diff --git a/homeassistant/components/overkiz/sensor.py b/homeassistant/components/overkiz/sensor.py index 9214398a37b..cec0d0d2571 100644 --- a/homeassistant/components/overkiz/sensor.py +++ b/homeassistant/components/overkiz/sensor.py @@ -70,6 +70,15 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [ options=["full", "normal", "medium", "low", "verylow"], translation_key="battery", ), + OverkizSensorDescription( + key=OverkizState.CORE_BATTERY_DISCRETE_LEVEL, + name="Battery", + entity_category=EntityCategory.DIAGNOSTIC, + icon="mdi:battery", + device_class=SensorDeviceClass.ENUM, + options=["good", "medium", "low", "critical"], + translation_key="battery", + ), OverkizSensorDescription( key=OverkizState.CORE_RSSI_LEVEL, name="RSSI level", diff --git a/homeassistant/components/overkiz/strings.json b/homeassistant/components/overkiz/strings.json index 0c564a003d6..05b5eac4b21 100644 --- a/homeassistant/components/overkiz/strings.json +++ b/homeassistant/components/overkiz/strings.json @@ -123,7 +123,9 @@ "low": "Low", "normal": "Normal", "medium": "Medium", - "verylow": "Very low" + "verylow": "Very low", + "good": "Good", + "critical": "Critical" } }, "discrete_rssi_level": { diff --git a/homeassistant/components/overkiz/water_heater/__init__.py b/homeassistant/components/overkiz/water_heater/__init__.py index 9895ea84c2c..2960cefe10c 100644 --- a/homeassistant/components/overkiz/water_heater/__init__.py +++ b/homeassistant/components/overkiz/water_heater/__init__.py @@ -13,6 +13,9 @@ from ..entity import OverkizEntity from .atlantic_domestic_hot_water_production_mlb_component import ( AtlanticDomesticHotWaterProductionMBLComponent, ) +from .atlantic_domestic_hot_water_production_v2_io_component import ( + AtlanticDomesticHotWaterProductionV2IOComponent, +) from .atlantic_pass_apc_dhw import AtlanticPassAPCDHW from .domestic_hot_water_production import DomesticHotWaterProduction from .hitachi_dhw import HitachiDHW @@ -52,4 +55,5 @@ 
WIDGET_TO_WATER_HEATER_ENTITY = { CONTROLLABLE_NAME_TO_WATER_HEATER_ENTITY = { "modbuslink:AtlanticDomesticHotWaterProductionMBLComponent": AtlanticDomesticHotWaterProductionMBLComponent, + "io:AtlanticDomesticHotWaterProductionV2_CV4E_IOComponent": AtlanticDomesticHotWaterProductionV2IOComponent, } diff --git a/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_v2_io_component.py b/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_v2_io_component.py new file mode 100644 index 00000000000..7e7db07f847 --- /dev/null +++ b/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_v2_io_component.py @@ -0,0 +1,332 @@ +"""Support for AtlanticDomesticHotWaterProductionV2IOComponent.""" + +from typing import Any, cast + +from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState + +from homeassistant.components.water_heater import ( + STATE_ECO, + STATE_ELECTRIC, + STATE_HEAT_PUMP, + STATE_PERFORMANCE, + WaterHeaterEntity, + WaterHeaterEntityFeature, +) +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature + +from ..entity import OverkizEntity + +DEFAULT_MIN_TEMP: float = 50.0 +DEFAULT_MAX_TEMP: float = 62.0 +MAX_BOOST_MODE_DURATION: int = 7 + +DHWP_AWAY_MODES = [ + OverkizCommandParam.ABSENCE, + OverkizCommandParam.AWAY, + OverkizCommandParam.FROSTPROTECTION, +] + + +class AtlanticDomesticHotWaterProductionV2IOComponent(OverkizEntity, WaterHeaterEntity): + """Representation of AtlanticDomesticHotWaterProductionV2IOComponent (io).""" + + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_supported_features = ( + WaterHeaterEntityFeature.TARGET_TEMPERATURE + | WaterHeaterEntityFeature.OPERATION_MODE + | WaterHeaterEntityFeature.AWAY_MODE + | WaterHeaterEntityFeature.ON_OFF + ) + _attr_operation_list = [ + STATE_ECO, + STATE_PERFORMANCE, + STATE_HEAT_PUMP, + STATE_ELECTRIC, + ] + + @property + def min_temp(self) -> float: + """Return the minimum temperature.""" + + min_temp = self.device.states[OverkizState.CORE_MINIMAL_TEMPERATURE_MANUAL_MODE] + if min_temp: + return cast(float, min_temp.value_as_float) + return DEFAULT_MIN_TEMP + + @property + def max_temp(self) -> float: + """Return the maximum temperature.""" + + max_temp = self.device.states[OverkizState.CORE_MAXIMAL_TEMPERATURE_MANUAL_MODE] + if max_temp: + return cast(float, max_temp.value_as_float) + return DEFAULT_MAX_TEMP + + @property + def current_temperature(self) -> float: + """Return the current temperature.""" + + return cast( + float, + self.executor.select_state( + OverkizState.IO_MIDDLE_WATER_TEMPERATURE, + ), + ) + + @property + def target_temperature(self) -> float: + """Return the temperature corresponding to the PRESET.""" + + return cast( + float, + self.executor.select_state(OverkizState.CORE_TARGET_TEMPERATURE), + ) + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new temperature.""" + + temperature = kwargs.get(ATTR_TEMPERATURE) + await self.executor.async_execute_command( + OverkizCommand.SET_TARGET_TEMPERATURE, temperature, refresh_afterwards=False + ) + await self.executor.async_execute_command( + OverkizCommand.REFRESH_TARGET_TEMPERATURE, refresh_afterwards=False + ) + await self.coordinator.async_refresh() + + @property + def is_state_eco(self) -> bool: + """Return true if eco mode is on.""" + + return ( + self.executor.select_state(OverkizState.IO_DHW_MODE) + == OverkizCommandParam.MANUAL_ECO_ACTIVE + ) + + @property + def is_state_performance(self) -> 
bool: + """Return true if performance mode is on.""" + + return ( + self.executor.select_state(OverkizState.IO_DHW_MODE) + == OverkizCommandParam.AUTO_MODE + ) + + @property + def is_state_heat_pump(self) -> bool: + """Return true if heat pump mode is on.""" + + return ( + self.executor.select_state(OverkizState.IO_DHW_MODE) + == OverkizCommandParam.MANUAL_ECO_INACTIVE + ) + + @property + def is_away_mode_on(self) -> bool: + """Return true if away mode is on.""" + + away_mode_duration = cast( + str, self.executor.select_state(OverkizState.IO_AWAY_MODE_DURATION) + ) + # away_mode_duration can be either a Literal["always"] + if away_mode_duration == OverkizCommandParam.ALWAYS: + return True + + # Or an int of 0 to 7 days. But it still is a string. + if away_mode_duration.isdecimal() and int(away_mode_duration) > 0: + return True + + return False + + @property + def current_operation(self) -> str | None: + """Return current operation.""" + + # The Away Mode leaves the current operation unchanged + if self.is_boost_mode_on: + return STATE_ELECTRIC + + if self.is_state_eco: + return STATE_ECO + + if self.is_state_performance: + return STATE_PERFORMANCE + + if self.is_state_heat_pump: + return STATE_HEAT_PUMP + + return None + + @property + def is_boost_mode_on(self) -> bool: + """Return true if boost mode is on.""" + + return ( + cast( + int, + self.executor.select_state(OverkizState.CORE_BOOST_MODE_DURATION), + ) + > 0 + ) + + async def async_set_operation_mode(self, operation_mode: str) -> None: + """Set new operation mode.""" + + if operation_mode == STATE_ECO: + if self.is_boost_mode_on: + await self.async_turn_boost_mode_off(refresh_afterwards=False) + + if self.is_away_mode_on: + await self.async_turn_away_mode_off(refresh_afterwards=False) + + await self.executor.async_execute_command( + OverkizCommand.SET_DHW_MODE, + OverkizCommandParam.MANUAL_ECO_ACTIVE, + refresh_afterwards=False, + ) + # ECO changes the target temperature so we have to refresh it + await self.executor.async_execute_command( + OverkizCommand.REFRESH_TARGET_TEMPERATURE, refresh_afterwards=False + ) + await self.coordinator.async_refresh() + + elif operation_mode == STATE_PERFORMANCE: + if self.is_boost_mode_on: + await self.async_turn_boost_mode_off(refresh_afterwards=False) + if self.is_away_mode_on: + await self.async_turn_away_mode_off(refresh_afterwards=False) + + await self.executor.async_execute_command( + OverkizCommand.SET_DHW_MODE, + OverkizCommandParam.AUTO_MODE, + refresh_afterwards=False, + ) + + await self.coordinator.async_refresh() + + elif operation_mode == STATE_HEAT_PUMP: + refresh_target_temp = False + if self.is_state_performance: + # Switching from STATE_PERFORMANCE to STATE_HEAT_PUMP + # changes the target temperature and requires a target temperature refresh + refresh_target_temp = True + + if self.is_boost_mode_on: + await self.async_turn_boost_mode_off(refresh_afterwards=False) + if self.is_away_mode_on: + await self.async_turn_away_mode_off(refresh_afterwards=False) + + await self.executor.async_execute_command( + OverkizCommand.SET_DHW_MODE, + OverkizCommandParam.MANUAL_ECO_INACTIVE, + refresh_afterwards=False, + ) + + if refresh_target_temp: + await self.executor.async_execute_command( + OverkizCommand.REFRESH_TARGET_TEMPERATURE, + refresh_afterwards=False, + ) + + await self.coordinator.async_refresh() + + elif operation_mode == STATE_ELECTRIC: + if self.is_away_mode_on: + await self.async_turn_away_mode_off(refresh_afterwards=False) + if not self.is_boost_mode_on: + await 
self.async_turn_boost_mode_on(refresh_afterwards=False) + await self.coordinator.async_refresh() + + async def async_turn_away_mode_on(self, refresh_afterwards: bool = True) -> None: + """Turn away mode on.""" + + await self.executor.async_execute_command( + OverkizCommand.SET_CURRENT_OPERATING_MODE, + { + OverkizCommandParam.RELAUNCH: OverkizCommandParam.OFF, + OverkizCommandParam.ABSENCE: OverkizCommandParam.ON, + }, + refresh_afterwards=False, + ) + # Toggling the AWAY mode changes away mode duration so we have to refresh it + await self.executor.async_execute_command( + OverkizCommand.REFRESH_AWAY_MODE_DURATION, + refresh_afterwards=False, + ) + if refresh_afterwards: + await self.coordinator.async_refresh() + + async def async_turn_away_mode_off(self, refresh_afterwards: bool = True) -> None: + """Turn away mode off.""" + + await self.executor.async_execute_command( + OverkizCommand.SET_CURRENT_OPERATING_MODE, + { + OverkizCommandParam.RELAUNCH: OverkizCommandParam.OFF, + OverkizCommandParam.ABSENCE: OverkizCommandParam.OFF, + }, + refresh_afterwards=False, + ) + # Toggling the AWAY mode changes away mode duration so we have to refresh it + await self.executor.async_execute_command( + OverkizCommand.REFRESH_AWAY_MODE_DURATION, + refresh_afterwards=False, + ) + if refresh_afterwards: + await self.coordinator.async_refresh() + + async def async_turn_boost_mode_on(self, refresh_afterwards: bool = True) -> None: + """Turn boost mode on.""" + + refresh_target_temp = False + if self.is_state_performance: + # Switching from STATE_PERFORMANCE to BOOST requires a target temperature refresh + refresh_target_temp = True + + await self.executor.async_execute_command( + OverkizCommand.SET_BOOST_MODE_DURATION, + MAX_BOOST_MODE_DURATION, + refresh_afterwards=False, + ) + + await self.executor.async_execute_command( + OverkizCommand.SET_CURRENT_OPERATING_MODE, + { + OverkizCommandParam.RELAUNCH: OverkizCommandParam.ON, + OverkizCommandParam.ABSENCE: OverkizCommandParam.OFF, + }, + refresh_afterwards=False, + ) + + await self.executor.async_execute_command( + OverkizCommand.REFRESH_BOOST_MODE_DURATION, + refresh_afterwards=False, + ) + + if refresh_target_temp: + await self.executor.async_execute_command( + OverkizCommand.REFRESH_TARGET_TEMPERATURE, refresh_afterwards=False + ) + + if refresh_afterwards: + await self.coordinator.async_refresh() + + async def async_turn_boost_mode_off(self, refresh_afterwards: bool = True) -> None: + """Turn boost mode off.""" + + await self.executor.async_execute_command( + OverkizCommand.SET_CURRENT_OPERATING_MODE, + { + OverkizCommandParam.RELAUNCH: OverkizCommandParam.OFF, + OverkizCommandParam.ABSENCE: OverkizCommandParam.OFF, + }, + refresh_afterwards=False, + ) + # Toggling the BOOST mode changes boost mode duration so we have to refresh it + await self.executor.async_execute_command( + OverkizCommand.REFRESH_BOOST_MODE_DURATION, + refresh_afterwards=False, + ) + + if refresh_afterwards: + await self.coordinator.async_refresh() diff --git a/homeassistant/components/palazzetti/strings.json b/homeassistant/components/palazzetti/strings.json index 501ee777fe9..7a6c47796df 100644 --- a/homeassistant/components/palazzetti/strings.json +++ b/homeassistant/components/palazzetti/strings.json @@ -74,7 +74,7 @@ "status": { "name": "Status", "state": { - "off": "Off", + "off": "[%key:common::state::off%]", "off_timer": "Timer-regulated switch off", "test_fire": "Ignition test", "heatup": "Pellet feed", @@ -83,7 +83,7 @@ "burning": "Operating", "burning_mod": "Operating - 
Modulating", "unknown": "Unknown", - "cool_fluid": "Stand-by", + "cool_fluid": "[%key:common::state::standby%]", "fire_stop": "Switch off", "clean_fire": "Burn pot cleaning", "cooling": "Cooling in progress", diff --git a/homeassistant/components/pglab/cover.py b/homeassistant/components/pglab/cover.py new file mode 100644 index 00000000000..8385fd95ffa --- /dev/null +++ b/homeassistant/components/pglab/cover.py @@ -0,0 +1,107 @@ +"""PG LAB Electronics Cover.""" + +from __future__ import annotations + +from typing import Any + +from pypglab.device import Device as PyPGLabDevice +from pypglab.shutter import Shutter as PyPGLabShutter + +from homeassistant.components.cover import ( + CoverDeviceClass, + CoverEntity, + CoverEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .discovery import PGLabDiscovery +from .entity import PGLabEntity + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up switches for device.""" + + @callback + def async_discover( + pglab_device: PyPGLabDevice, pglab_shutter: PyPGLabShutter + ) -> None: + """Discover and add a PG LAB Cover.""" + pglab_discovery = config_entry.runtime_data + pglab_cover = PGLabCover(pglab_discovery, pglab_device, pglab_shutter) + async_add_entities([pglab_cover]) + + # Register the callback to create the cover entity when discovered. + pglab_discovery = config_entry.runtime_data + await pglab_discovery.register_platform(hass, Platform.COVER, async_discover) + + +class PGLabCover(PGLabEntity, CoverEntity): + """A PGLab Cover.""" + + _attr_translation_key = "shutter" + + def __init__( + self, + pglab_discovery: PGLabDiscovery, + pglab_device: PyPGLabDevice, + pglab_shutter: PyPGLabShutter, + ) -> None: + """Initialize the Cover class.""" + + super().__init__( + pglab_discovery, + pglab_device, + pglab_shutter, + ) + + self._attr_unique_id = f"{pglab_device.id}_shutter{pglab_shutter.id}" + self._attr_translation_placeholders = {"shutter_id": pglab_shutter.id} + + self._shutter = pglab_shutter + + self._attr_device_class = CoverDeviceClass.SHUTTER + self._attr_supported_features = ( + CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE | CoverEntityFeature.STOP + ) + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the cover.""" + await self._shutter.open() + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close cover.""" + await self._shutter.close() + + async def async_stop_cover(self, **kwargs: Any) -> None: + """Stop the cover.""" + await self._shutter.stop() + + @property + def is_closed(self) -> bool | None: + """Return if cover is closed.""" + if not self._shutter.state: + return None + return self._shutter.state == PyPGLabShutter.STATE_CLOSED + + @property + def is_closing(self) -> bool | None: + """Return if the cover is closing.""" + if not self._shutter.state: + return None + return self._shutter.state == PyPGLabShutter.STATE_CLOSING + + @property + def is_opening(self) -> bool | None: + """Return if the cover is opening.""" + if not self._shutter.state: + return None + return self._shutter.state == PyPGLabShutter.STATE_OPENING diff --git a/homeassistant/components/pglab/discovery.py b/homeassistant/components/pglab/discovery.py index e34f80a2e2d..c1d8653c17b 
100644 --- a/homeassistant/components/pglab/discovery.py +++ b/homeassistant/components/pglab/discovery.py @@ -34,12 +34,14 @@ if TYPE_CHECKING: # Supported platforms. PLATFORMS = [ + Platform.COVER, Platform.SENSOR, Platform.SWITCH, ] # Used to create a new component entity. CREATE_NEW_ENTITY = { + Platform.COVER: "pglab_create_new_entity_cover", Platform.SENSOR: "pglab_create_new_entity_sensor", Platform.SWITCH: "pglab_create_new_entity_switch", } @@ -250,6 +252,13 @@ class PGLabDiscovery: ) self._discovered[pglab_device.id] = discovery_info + # Create all new cover entities. + for s in pglab_device.shutters: + # the HA entity is not yet created, send a message to create it + async_dispatcher_send( + hass, CREATE_NEW_ENTITY[Platform.COVER], pglab_device, s + ) + # Create all new relay entities. for r in pglab_device.relays: # The HA entity is not yet created, send a message to create it. diff --git a/homeassistant/components/pglab/strings.json b/homeassistant/components/pglab/strings.json index 4fad408ad98..c6f80d12f09 100644 --- a/homeassistant/components/pglab/strings.json +++ b/homeassistant/components/pglab/strings.json @@ -15,6 +15,11 @@ } }, "entity": { + "cover": { + "shutter": { + "name": "Shutter {shutter_id}" + } + }, "switch": { "relay": { "name": "Relay {relay_id}" diff --git a/homeassistant/components/philips_js/light.py b/homeassistant/components/philips_js/light.py index bf15292335e..87e3323a30c 100644 --- a/homeassistant/components/philips_js/light.py +++ b/homeassistant/components/philips_js/light.py @@ -3,7 +3,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Any +from typing import Any, cast from haphilipsjs import PhilipsTV from haphilipsjs.typing import AmbilightCurrentConfiguration @@ -328,7 +328,7 @@ class PhilipsTVLightEntity(PhilipsJsEntity, LightEntity): """Turn the bulb on.""" brightness = kwargs.get(ATTR_BRIGHTNESS, self.brightness) hs_color = kwargs.get(ATTR_HS_COLOR, self.hs_color) - attr_effect = kwargs.get(ATTR_EFFECT, self.effect) + attr_effect = cast(str, kwargs.get(ATTR_EFFECT, self.effect)) if not self._tv.on: raise HomeAssistantError("TV is not available") diff --git a/homeassistant/components/picnic/manifest.json b/homeassistant/components/picnic/manifest.json index 09f28da39a4..251964c15d0 100644 --- a/homeassistant/components/picnic/manifest.json +++ b/homeassistant/components/picnic/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/picnic", "iot_class": "cloud_polling", "loggers": ["python_picnic_api2"], - "requirements": ["python-picnic-api2==1.2.2"] + "requirements": ["python-picnic-api2==1.2.4"] } diff --git a/homeassistant/components/pitsos/__init__.py b/homeassistant/components/pitsos/__init__.py new file mode 100644 index 00000000000..e49539d8ed2 --- /dev/null +++ b/homeassistant/components/pitsos/__init__.py @@ -0,0 +1 @@ +"""Pitsos virtual integration.""" diff --git a/homeassistant/components/pitsos/manifest.json b/homeassistant/components/pitsos/manifest.json new file mode 100644 index 00000000000..55f5ac7b2fc --- /dev/null +++ b/homeassistant/components/pitsos/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "pitsos", + "name": "Pitsos", + "integration_type": "virtual", + "supported_by": "home_connect" +} diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index 87878980f2d..3f812c1a63b 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json 
@@ -8,6 +8,6 @@ "iot_class": "local_polling", "loggers": ["plugwise"], "quality_scale": "platinum", - "requirements": ["plugwise==1.7.2"], + "requirements": ["plugwise==1.7.3"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/homeassistant/components/plugwise/strings.json b/homeassistant/components/plugwise/strings.json index d16b38df992..99d501a79b5 100644 --- a/homeassistant/components/plugwise/strings.json +++ b/homeassistant/components/plugwise/strings.json @@ -139,7 +139,7 @@ "select_schedule": { "name": "Thermostat schedule", "state": { - "off": "Off" + "off": "[%key:common::state::off%]" } } }, diff --git a/homeassistant/components/point/__init__.py b/homeassistant/components/point/__init__.py index e446606f191..0f90bd75c9d 100644 --- a/homeassistant/components/point/__init__.py +++ b/homeassistant/components/point/__init__.py @@ -1,7 +1,5 @@ """Support for Minut Point.""" -import asyncio -from dataclasses import dataclass from http import HTTPStatus import logging @@ -29,26 +27,18 @@ from homeassistant.helpers import ( config_validation as cv, ) from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType from . import api -from .const import ( - CONF_WEBHOOK_URL, - DOMAIN, - EVENT_RECEIVED, - POINT_DISCOVERY_NEW, - SCAN_INTERVAL, - SIGNAL_UPDATE_ENTITY, - SIGNAL_WEBHOOK, -) +from .const import CONF_WEBHOOK_URL, DOMAIN, EVENT_RECEIVED, SIGNAL_WEBHOOK +from .coordinator import PointDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] -type PointConfigEntry = ConfigEntry[PointData] +type PointConfigEntry = ConfigEntry[PointDataUpdateCoordinator] CONFIG_SCHEMA = vol.Schema( { @@ -131,9 +121,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: PointConfigEntry) -> boo point_session = PointSession(auth) - client = MinutPointClient(hass, entry, point_session) - hass.async_create_task(client.update()) - entry.runtime_data = PointData(client) + coordinator = PointDataUpdateCoordinator(hass, point_session) + + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator await async_setup_webhook(hass, entry, point_session) await hass.config_entries.async_forward_entry_setups( @@ -176,7 +168,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: PointConfigEntry) -> bo if unload_ok := await hass.config_entries.async_unload_platforms( entry, [*PLATFORMS, Platform.ALARM_CONTROL_PANEL] ): - session: PointSession = entry.runtime_data.client + session = entry.runtime_data.point if CONF_WEBHOOK_ID in entry.data: webhook.async_unregister(hass, entry.data[CONF_WEBHOOK_ID]) await session.remove_webhook() @@ -197,87 +189,3 @@ async def handle_webhook( data["webhook_id"] = webhook_id async_dispatcher_send(hass, SIGNAL_WEBHOOK, data, data.get("hook_id")) hass.bus.async_fire(EVENT_RECEIVED, data) - - -class MinutPointClient: - """Get the latest data and update the states.""" - - def __init__( - self, hass: HomeAssistant, config_entry: ConfigEntry, session: PointSession - ) -> None: - """Initialize the Minut data object.""" - self._known_devices: set[str] = set() - self._known_homes: set[str] = set() - self._hass = hass - self._config_entry = config_entry - self._is_available = True - self._client = session - - async_track_time_interval(self._hass, self.update, SCAN_INTERVAL) - - async def 
update(self, *args): - """Periodically poll the cloud for current state.""" - await self._sync() - - async def _sync(self): - """Update local list of devices.""" - if not await self._client.update(): - self._is_available = False - _LOGGER.warning("Device is unavailable") - async_dispatcher_send(self._hass, SIGNAL_UPDATE_ENTITY) - return - - self._is_available = True - for home_id in self._client.homes: - if home_id not in self._known_homes: - async_dispatcher_send( - self._hass, - POINT_DISCOVERY_NEW.format(Platform.ALARM_CONTROL_PANEL), - home_id, - ) - self._known_homes.add(home_id) - for device in self._client.devices: - if device.device_id not in self._known_devices: - for platform in PLATFORMS: - async_dispatcher_send( - self._hass, - POINT_DISCOVERY_NEW.format(platform), - device.device_id, - ) - self._known_devices.add(device.device_id) - async_dispatcher_send(self._hass, SIGNAL_UPDATE_ENTITY) - - def device(self, device_id): - """Return device representation.""" - return self._client.device(device_id) - - def is_available(self, device_id): - """Return device availability.""" - if not self._is_available: - return False - return device_id in self._client.device_ids - - async def remove_webhook(self): - """Remove the session webhook.""" - return await self._client.remove_webhook() - - @property - def homes(self): - """Return known homes.""" - return self._client.homes - - async def async_alarm_disarm(self, home_id): - """Send alarm disarm command.""" - return await self._client.alarm_disarm(home_id) - - async def async_alarm_arm(self, home_id): - """Send alarm arm command.""" - return await self._client.alarm_arm(home_id) - - -@dataclass -class PointData: - """Point Data.""" - - client: MinutPointClient - entry_lock: asyncio.Lock = asyncio.Lock() diff --git a/homeassistant/components/point/alarm_control_panel.py b/homeassistant/components/point/alarm_control_panel.py index 0f501d2ee09..fa56bf70546 100644 --- a/homeassistant/components/point/alarm_control_panel.py +++ b/homeassistant/components/point/alarm_control_panel.py @@ -2,23 +2,22 @@ from __future__ import annotations -from collections.abc import Callable import logging +from pypoint import PointSession + from homeassistant.components.alarm_control_panel import ( - DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, AlarmControlPanelState, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from . import MinutPointClient -from .const import DOMAIN as POINT_DOMAIN, POINT_DISCOVERY_NEW, SIGNAL_WEBHOOK +from . 
import PointConfigEntry +from .const import DOMAIN as POINT_DOMAIN, SIGNAL_WEBHOOK _LOGGER = logging.getLogger(__name__) @@ -32,21 +31,20 @@ EVENT_MAP = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: PointConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up a Point's alarm_control_panel based on a config entry.""" + coordinator = config_entry.runtime_data - async def async_discover_home(home_id): + def async_discover_home(home_id: str) -> None: """Discover and add a discovered home.""" - client = config_entry.runtime_data.client - async_add_entities([MinutPointAlarmControl(client, home_id)], True) + async_add_entities([MinutPointAlarmControl(coordinator.point, home_id)]) - async_dispatcher_connect( - hass, - POINT_DISCOVERY_NEW.format(ALARM_CONTROL_PANEL_DOMAIN, POINT_DOMAIN), - async_discover_home, - ) + coordinator.new_home_callback = async_discover_home + + for home_id in coordinator.point.homes: + async_discover_home(home_id) class MinutPointAlarmControl(AlarmControlPanelEntity): @@ -55,12 +53,11 @@ class MinutPointAlarmControl(AlarmControlPanelEntity): _attr_supported_features = AlarmControlPanelEntityFeature.ARM_AWAY _attr_code_arm_required = False - def __init__(self, point_client: MinutPointClient, home_id: str) -> None: + def __init__(self, point: PointSession, home_id: str) -> None: """Initialize the entity.""" - self._client = point_client + self._client = point self._home_id = home_id - self._async_unsub_hook_dispatcher_connect: Callable[[], None] | None = None - self._home = point_client.homes[self._home_id] + self._home = point.homes[self._home_id] self._attr_name = self._home["name"] self._attr_unique_id = f"point.{home_id}" @@ -73,16 +70,10 @@ class MinutPointAlarmControl(AlarmControlPanelEntity): async def async_added_to_hass(self) -> None: """Call when entity is added to HOme Assistant.""" await super().async_added_to_hass() - self._async_unsub_hook_dispatcher_connect = async_dispatcher_connect( - self.hass, SIGNAL_WEBHOOK, self._webhook_event + self.async_on_remove( + async_dispatcher_connect(self.hass, SIGNAL_WEBHOOK, self._webhook_event) ) - async def async_will_remove_from_hass(self) -> None: - """Disconnect dispatcher listener when removed.""" - await super().async_will_remove_from_hass() - if self._async_unsub_hook_dispatcher_connect: - self._async_unsub_hook_dispatcher_connect() - @callback def _webhook_event(self, data, webhook): """Process new event from the webhook.""" @@ -107,12 +98,12 @@ class MinutPointAlarmControl(AlarmControlPanelEntity): async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - status = await self._client.async_alarm_disarm(self._home_id) + status = await self._client.alarm_disarm(self._home_id) if status: self._home["alarm_status"] = "off" async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - status = await self._client.async_alarm_arm(self._home_id) + status = await self._client.alarm_arm(self._home_id) if status: self._home["alarm_status"] = "on" diff --git a/homeassistant/components/point/binary_sensor.py b/homeassistant/components/point/binary_sensor.py index c9338cb63f2..17fe40b9654 100644 --- a/homeassistant/components/point/binary_sensor.py +++ b/homeassistant/components/point/binary_sensor.py @@ -3,26 +3,27 @@ from __future__ import annotations import logging +from typing import Any from pypoint import EVENTS from homeassistant.components.binary_sensor import ( - 
DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorDeviceClass, BinarySensorEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from .const import DOMAIN as POINT_DOMAIN, POINT_DISCOVERY_NEW, SIGNAL_WEBHOOK +from . import PointConfigEntry +from .const import SIGNAL_WEBHOOK +from .coordinator import PointDataUpdateCoordinator from .entity import MinutPointEntity _LOGGER = logging.getLogger(__name__) -DEVICES = { +DEVICES: dict[str, Any] = { "alarm": {"icon": "mdi:alarm-bell"}, "battery": {"device_class": BinarySensorDeviceClass.BATTERY}, "button_press": {"icon": "mdi:gesture-tap-button"}, @@ -42,69 +43,60 @@ DEVICES = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: PointConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up a Point's binary sensors based on a config entry.""" - async def async_discover_sensor(device_id): + coordinator = config_entry.runtime_data + + def async_discover_sensor(device_id: str) -> None: """Discover and add a discovered sensor.""" - client = config_entry.runtime_data.client async_add_entities( - ( - MinutPointBinarySensor(client, device_id, device_name) - for device_name in DEVICES - if device_name in EVENTS - ), - True, + MinutPointBinarySensor(coordinator, device_id, device_name) + for device_name in DEVICES + if device_name in EVENTS ) - async_dispatcher_connect( - hass, - POINT_DISCOVERY_NEW.format(BINARY_SENSOR_DOMAIN, POINT_DOMAIN), - async_discover_sensor, + coordinator.new_device_callbacks.append(async_discover_sensor) + + async_add_entities( + MinutPointBinarySensor(coordinator, device_id, device_name) + for device_name in DEVICES + if device_name in EVENTS + for device_id in coordinator.point.device_ids ) class MinutPointBinarySensor(MinutPointEntity, BinarySensorEntity): """The platform class required by Home Assistant.""" - def __init__(self, point_client, device_id, device_name): + def __init__( + self, coordinator: PointDataUpdateCoordinator, device_id: str, key: str + ) -> None: """Initialize the binary sensor.""" - super().__init__( - point_client, - device_id, - DEVICES[device_name].get("device_class", device_name), - ) - self._device_name = device_name - self._async_unsub_hook_dispatcher_connect = None - self._events = EVENTS[device_name] - self._attr_unique_id = f"point.{device_id}-{device_name}" - self._attr_icon = DEVICES[self._device_name].get("icon") + self._attr_device_class = DEVICES[key].get("device_class", key) + super().__init__(coordinator, device_id) + self._device_name = key + self._events = EVENTS[key] + self._attr_unique_id = f"point.{device_id}-{key}" + self._attr_icon = DEVICES[key].get("icon") async def async_added_to_hass(self) -> None: """Call when entity is added to HOme Assistant.""" await super().async_added_to_hass() - self._async_unsub_hook_dispatcher_connect = async_dispatcher_connect( - self.hass, SIGNAL_WEBHOOK, self._webhook_event + self.async_on_remove( + async_dispatcher_connect(self.hass, SIGNAL_WEBHOOK, self._webhook_event) ) - async def async_will_remove_from_hass(self) -> None: - """Disconnect dispatcher listener when removed.""" - await super().async_will_remove_from_hass() - if self._async_unsub_hook_dispatcher_connect: - self._async_unsub_hook_dispatcher_connect() - - async def _update_callback(self): + def 
_handle_coordinator_update(self) -> None: """Update the value of the sensor.""" - if not self.is_updated: - return if self.device_class == BinarySensorDeviceClass.CONNECTIVITY: # connectivity is the other way around. self._attr_is_on = self._events[0] not in self.device.ongoing_events else: self._attr_is_on = self._events[0] in self.device.ongoing_events - self.async_write_ha_state() + super()._handle_coordinator_update() @callback def _webhook_event(self, data, webhook): diff --git a/homeassistant/components/point/coordinator.py b/homeassistant/components/point/coordinator.py new file mode 100644 index 00000000000..c0cb4e27646 --- /dev/null +++ b/homeassistant/components/point/coordinator.py @@ -0,0 +1,70 @@ +"""Define a data update coordinator for Point.""" + +from collections.abc import Callable +from datetime import datetime +import logging +from typing import Any + +from pypoint import PointSession +from tempora.utc import fromtimestamp + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util.dt import parse_datetime + +from .const import DOMAIN, SCAN_INTERVAL + +_LOGGER = logging.getLogger(__name__) + + +class PointDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]): + """Class to manage fetching Point data from the API.""" + + def __init__(self, hass: HomeAssistant, point: PointSession) -> None: + """Initialize.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + self.point = point + self.device_updates: dict[str, datetime] = {} + self._known_devices: set[str] = set() + self._known_homes: set[str] = set() + self.new_home_callback: Callable[[str], None] | None = None + self.new_device_callbacks: list[Callable[[str], None]] = [] + self.data: dict[str, dict[str, Any]] = {} + + async def _async_update_data(self) -> dict[str, dict[str, Any]]: + if not await self.point.update(): + raise UpdateFailed("Failed to fetch data from Point") + + if new_homes := set(self.point.homes) - self._known_homes: + _LOGGER.debug("Found new homes: %s", new_homes) + for home_id in new_homes: + if self.new_home_callback: + self.new_home_callback(home_id) + self._known_homes.update(new_homes) + + device_ids = {device.device_id for device in self.point.devices} + if new_devices := device_ids - self._known_devices: + _LOGGER.debug("Found new devices: %s", new_devices) + for device_id in new_devices: + for callback in self.new_device_callbacks: + callback(device_id) + self._known_devices.update(new_devices) + + for device in self.point.devices: + last_updated = parse_datetime(device.last_update) + if ( + not last_updated + or device.device_id not in self.device_updates + or self.device_updates[device.device_id] < last_updated + ): + self.device_updates[device.device_id] = last_updated or fromtimestamp(0) + self.data[device.device_id] = { + k: await device.sensor(k) + for k in ("temperature", "humidity", "sound_pressure") + } + return self.data diff --git a/homeassistant/components/point/entity.py b/homeassistant/components/point/entity.py index 5c52e81e6f7..39af7867e97 100644 --- a/homeassistant/components/point/entity.py +++ b/homeassistant/components/point/entity.py @@ -2,31 +2,27 @@ import logging +from pypoint import Device, PointSession + from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from 
homeassistant.helpers.entity import Entity -from homeassistant.util.dt import as_local, parse_datetime, utc_from_timestamp +from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.util.dt import as_local -from .const import DOMAIN, SIGNAL_UPDATE_ENTITY +from .const import DOMAIN +from .coordinator import PointDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) -class MinutPointEntity(Entity): +class MinutPointEntity(CoordinatorEntity[PointDataUpdateCoordinator]): """Base Entity used by the sensors.""" - _attr_should_poll = False - - def __init__(self, point_client, device_id, device_class) -> None: + def __init__(self, coordinator: PointDataUpdateCoordinator, device_id: str) -> None: """Initialize the entity.""" - self._async_unsub_dispatcher_connect = None - self._client = point_client - self._id = device_id + super().__init__(coordinator) + self.device_id = device_id self._name = self.device.name - self._attr_device_class = device_class - self._updated = utc_from_timestamp(0) - self._attr_unique_id = f"point.{device_id}-{device_class}" device = self.device.device self._attr_device_info = DeviceInfo( connections={(dr.CONNECTION_NETWORK_MAC, device["device_mac"])}, @@ -37,59 +33,32 @@ class MinutPointEntity(Entity): sw_version=device["firmware"]["installed"], via_device=(DOMAIN, device["home"]), ) - if device_class: - self._attr_name = f"{self._name} {device_class.capitalize()}" - - def __str__(self) -> str: - """Return string representation of device.""" - return f"MinutPoint {self.name}" - - async def async_added_to_hass(self): - """Call when entity is added to hass.""" - _LOGGER.debug("Created device %s", self) - self._async_unsub_dispatcher_connect = async_dispatcher_connect( - self.hass, SIGNAL_UPDATE_ENTITY, self._update_callback - ) - await self._update_callback() - - async def async_will_remove_from_hass(self) -> None: - """Disconnect dispatcher listener when removed.""" - if self._async_unsub_dispatcher_connect: - self._async_unsub_dispatcher_connect() + if self.device_class: + self._attr_name = f"{self._name} {self.device_class.capitalize()}" async def _update_callback(self): """Update the value of the sensor.""" + @property + def client(self) -> PointSession: + """Return the client object.""" + return self.coordinator.point + @property def available(self) -> bool: """Return true if device is not offline.""" - return self._client.is_available(self.device_id) + return super().available and self.device_id in self.client.device_ids @property - def device(self): + def device(self) -> Device: """Return the representation of the device.""" - return self._client.device(self.device_id) - - @property - def device_id(self): - """Return the id of the device.""" - return self._id + return self.client.device(self.device_id) @property def extra_state_attributes(self): """Return status of device.""" attrs = self.device.device_status - attrs["last_heard_from"] = as_local(self.last_update).strftime( - "%Y-%m-%d %H:%M:%S" - ) + attrs["last_heard_from"] = as_local( + self.coordinator.device_updates[self.device_id] + ).strftime("%Y-%m-%d %H:%M:%S") return attrs - - @property - def is_updated(self): - """Return true if sensor have been updated.""" - return self.last_update > self._updated - - @property - def last_update(self): - """Return the last_update time for the device.""" - return parse_datetime(self.device.last_update) diff --git a/homeassistant/components/point/sensor.py b/homeassistant/components/point/sensor.py index c959d09d606..246536d86ab 100644 
--- a/homeassistant/components/point/sensor.py +++ b/homeassistant/components/point/sensor.py @@ -5,19 +5,17 @@ from __future__ import annotations import logging from homeassistant.components.sensor import ( - DOMAIN as SENSOR_DOMAIN, SensorDeviceClass, SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, UnitOfSoundPressure, UnitOfTemperature from homeassistant.core import HomeAssistant -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from homeassistant.util.dt import parse_datetime +from homeassistant.helpers.typing import StateType -from .const import DOMAIN as POINT_DOMAIN, POINT_DISCOVERY_NEW +from . import PointConfigEntry +from .coordinator import PointDataUpdateCoordinator from .entity import MinutPointEntity _LOGGER = logging.getLogger(__name__) @@ -37,7 +35,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( native_unit_of_measurement=PERCENTAGE, ), SensorEntityDescription( - key="sound", + key="sound_pressure", suggested_display_precision=1, device_class=SensorDeviceClass.SOUND_PRESSURE, native_unit_of_measurement=UnitOfSoundPressure.WEIGHTED_DECIBEL_A, @@ -47,26 +45,26 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: PointConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up a Point's sensors based on a config entry.""" - async def async_discover_sensor(device_id): + coordinator = config_entry.runtime_data + + def async_discover_sensor(device_id: str) -> None: """Discover and add a discovered sensor.""" - client = config_entry.runtime_data.client async_add_entities( - [ - MinutPointSensor(client, device_id, description) - for description in SENSOR_TYPES - ], - True, + MinutPointSensor(coordinator, device_id, description) + for description in SENSOR_TYPES ) - async_dispatcher_connect( - hass, - POINT_DISCOVERY_NEW.format(SENSOR_DOMAIN, POINT_DOMAIN), - async_discover_sensor, + coordinator.new_device_callbacks.append(async_discover_sensor) + + async_add_entities( + MinutPointSensor(coordinator, device_id, description) + for device_id in coordinator.data + for description in SENSOR_TYPES ) @@ -74,16 +72,17 @@ class MinutPointSensor(MinutPointEntity, SensorEntity): """The platform class required by Home Assistant.""" def __init__( - self, point_client, device_id, description: SensorEntityDescription + self, + coordinator: PointDataUpdateCoordinator, + device_id: str, + description: SensorEntityDescription, ) -> None: """Initialize the sensor.""" - super().__init__(point_client, device_id, description.device_class) self.entity_description = description + super().__init__(coordinator, device_id) + self._attr_unique_id = f"point.{device_id}-{description.key}" - async def _update_callback(self): - """Update the value of the sensor.""" - _LOGGER.debug("Update sensor value for %s", self) - if self.is_updated: - self._attr_native_value = await self.device.sensor(self.device_class) - self._updated = parse_datetime(self.device.last_update) - self.async_write_ha_state() + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return self.coordinator.data[self.device_id].get(self.entity_description.key) diff --git a/homeassistant/components/private_ble_device/strings.json b/homeassistant/components/private_ble_device/strings.json 
index c35775a4843..845a5d92bae 100644 --- a/homeassistant/components/private_ble_device/strings.json +++ b/homeassistant/components/private_ble_device/strings.json @@ -14,7 +14,7 @@ "irk_not_valid": "The key does not look like a valid IRK." }, "abort": { - "bluetooth_not_available": "At least one Bluetooth adapter or remote bluetooth proxy must be configured to track Private BLE Devices." + "bluetooth_not_available": "At least one Bluetooth adapter or remote Bluetooth proxy must be configured to track Private BLE Devices." } }, "entity": { diff --git a/homeassistant/components/profilo/__init__.py b/homeassistant/components/profilo/__init__.py new file mode 100644 index 00000000000..5f727b1bc8b --- /dev/null +++ b/homeassistant/components/profilo/__init__.py @@ -0,0 +1 @@ +"""Profilo virtual integration.""" diff --git a/homeassistant/components/profilo/manifest.json b/homeassistant/components/profilo/manifest.json new file mode 100644 index 00000000000..c5671d5be3f --- /dev/null +++ b/homeassistant/components/profilo/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "profilo", + "name": "Profilo", + "integration_type": "virtual", + "supported_by": "home_connect" +} diff --git a/homeassistant/components/progettihwsw/config_flow.py b/homeassistant/components/progettihwsw/config_flow.py index 2e5ea221dca..8818eff2d81 100644 --- a/homeassistant/components/progettihwsw/config_flow.py +++ b/homeassistant/components/progettihwsw/config_flow.py @@ -1,5 +1,6 @@ """Config flow for ProgettiHWSW Automation integration.""" +import logging from typing import TYPE_CHECKING, Any from ProgettiHWSW.ProgettiHWSWAPI import ProgettiHWSWAPI @@ -11,6 +12,8 @@ from homeassistant.exceptions import HomeAssistantError from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + DATA_SCHEMA = vol.Schema( {vol.Required("host"): str, vol.Required("port", default=80): int} ) @@ -86,7 +89,8 @@ class ProgettiHWSWConfigFlow(ConfigFlow, domain=DOMAIN): info = await validate_input(self.hass, user_input) except CannotConnect: errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: user_input.update(info) diff --git a/homeassistant/components/prosegur/strings.json b/homeassistant/components/prosegur/strings.json index 9b9ac45fc85..e5176e96090 100644 --- a/homeassistant/components/prosegur/strings.json +++ b/homeassistant/components/prosegur/strings.json @@ -5,7 +5,7 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "country": "Country" + "country": "[%key:common::config_flow::data::country%]" } }, "choose_contract": { diff --git a/homeassistant/components/proxy/camera.py b/homeassistant/components/proxy/camera.py index f6e909f13d1..47fa9454deb 100644 --- a/homeassistant/components/proxy/camera.py +++ b/homeassistant/components/proxy/camera.py @@ -104,6 +104,15 @@ def _resize_image(image, opts): new_width = opts.max_width (old_width, old_height) = img.size old_size = len(image) + + # If no max_width specified, only apply quality changes if requested + if new_width is None: + if opts.quality is None: + return image + imgbuf = io.BytesIO() + img.save(imgbuf, "JPEG", optimize=True, quality=quality) + return imgbuf.getvalue() + if old_width <= new_width: if opts.quality is None: _LOGGER.debug("Image is smaller-than/equal-to requested width") diff --git a/homeassistant/components/prusalink/strings.json 
b/homeassistant/components/prusalink/strings.json index 7c6f0bbf2dd..036bd2c9c6e 100644 --- a/homeassistant/components/prusalink/strings.json +++ b/homeassistant/components/prusalink/strings.json @@ -36,7 +36,7 @@ "printing": "Printing", "paused": "[%key:common::state::paused%]", "finished": "Finished", - "stopped": "Stopped", + "stopped": "[%key:common::state::stopped%]", "error": "Error", "attention": "Attention", "ready": "Ready" @@ -85,7 +85,7 @@ "name": "Z-Height" }, "nozzle_diameter": { - "name": "Nozzle Diameter" + "name": "Nozzle diameter" } }, "button": { diff --git a/homeassistant/components/pterodactyl/__init__.py b/homeassistant/components/pterodactyl/__init__.py new file mode 100644 index 00000000000..33b3cc7576f --- /dev/null +++ b/homeassistant/components/pterodactyl/__init__.py @@ -0,0 +1,27 @@ +"""The Pterodactyl integration.""" + +from __future__ import annotations + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .coordinator import PterodactylConfigEntry, PterodactylCoordinator + +_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR] + + +async def async_setup_entry(hass: HomeAssistant, entry: PterodactylConfigEntry) -> bool: + """Set up Pterodactyl from a config entry.""" + coordinator = PterodactylCoordinator(hass, entry) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS) + + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: PterodactylConfigEntry +) -> bool: + """Unload a Pterodactyl config entry.""" + return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS) diff --git a/homeassistant/components/pterodactyl/api.py b/homeassistant/components/pterodactyl/api.py new file mode 100644 index 00000000000..38cb9809652 --- /dev/null +++ b/homeassistant/components/pterodactyl/api.py @@ -0,0 +1,120 @@ +"""API module of the Pterodactyl integration.""" + +from dataclasses import dataclass +import logging + +from pydactyl import PterodactylClient +from pydactyl.exceptions import ( + BadRequestError, + ClientConfigError, + PterodactylApiError, + PydactylError, +) + +from homeassistant.core import HomeAssistant + +_LOGGER = logging.getLogger(__name__) + + +class PterodactylConfigurationError(Exception): + """Raised when the configuration is invalid.""" + + +class PterodactylConnectionError(Exception): + """Raised when no data can be fetched from the server.""" + + +@dataclass +class PterodactylData: + """Data for the Pterodactyl server.""" + + name: str + uuid: str + identifier: str + state: str + memory_utilization: int + cpu_utilization: float + disk_utilization: int + network_rx_utilization: int + network_tx_utilization: int + uptime: int + + +class PterodactylAPI: + """Wrapper for Pterodactyl's API.""" + + pterodactyl: PterodactylClient | None + identifiers: list[str] + + def __init__(self, hass: HomeAssistant, host: str, api_key: str) -> None: + """Initialize the Pterodactyl API.""" + self.hass = hass + self.host = host + self.api_key = api_key + self.pterodactyl = None + self.identifiers = [] + + async def async_init(self): + """Initialize the Pterodactyl API.""" + self.pterodactyl = PterodactylClient(self.host, self.api_key) + + try: + paginated_response = await self.hass.async_add_executor_job( + self.pterodactyl.client.servers.list_servers + ) + except ClientConfigError as error: + raise PterodactylConfigurationError(error) from error + except ( + PydactylError,
BadRequestError, + PterodactylApiError, + ) as error: + raise PterodactylConnectionError(error) from error + else: + game_servers = paginated_response.collect() + for game_server in game_servers: + self.identifiers.append(game_server["attributes"]["identifier"]) + + _LOGGER.debug("Identifiers of Pterodactyl servers: %s", self.identifiers) + + def get_server_data(self, identifier: str) -> tuple[dict, dict]: + """Get all data from the Pterodactyl server.""" + server = self.pterodactyl.client.servers.get_server(identifier) # type: ignore[union-attr] + utilization = self.pterodactyl.client.servers.get_server_utilization( # type: ignore[union-attr] + identifier + ) + + return server, utilization + + async def async_get_data(self) -> dict[str, PterodactylData]: + """Update the data from all Pterodactyl servers.""" + data = {} + + for identifier in self.identifiers: + try: + server, utilization = await self.hass.async_add_executor_job( + self.get_server_data, identifier + ) + except ( + PydactylError, + BadRequestError, + PterodactylApiError, + ) as error: + raise PterodactylConnectionError(error) from error + else: + data[identifier] = PterodactylData( + name=server["name"], + uuid=server["uuid"], + identifier=identifier, + state=utilization["current_state"], + cpu_utilization=utilization["resources"]["cpu_absolute"], + memory_utilization=utilization["resources"]["memory_bytes"], + disk_utilization=utilization["resources"]["disk_bytes"], + network_rx_utilization=utilization["resources"]["network_rx_bytes"], + network_tx_utilization=utilization["resources"]["network_tx_bytes"], + uptime=utilization["resources"]["uptime"], + ) + + _LOGGER.debug("%s", data[identifier]) + + return data diff --git a/homeassistant/components/pterodactyl/binary_sensor.py b/homeassistant/components/pterodactyl/binary_sensor.py new file mode 100644 index 00000000000..e3615c47499 --- /dev/null +++ b/homeassistant/components/pterodactyl/binary_sensor.py @@ -0,0 +1,64 @@ +"""Binary sensor platform of the Pterodactyl integration.""" + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .coordinator import PterodactylConfigEntry, PterodactylCoordinator +from .entity import PterodactylEntity + +KEY_STATUS = "status" + + +BINARY_SENSOR_DESCRIPTIONS = [ + BinarySensorEntityDescription( + key=KEY_STATUS, + translation_key=KEY_STATUS, + device_class=BinarySensorDeviceClass.RUNNING, + ), +] + +# Coordinator is used to centralize the data updates. 
+PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: PterodactylConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the Pterodactyl binary sensor platform.""" + coordinator = config_entry.runtime_data + + async_add_entities( + PterodactylBinarySensorEntity( + coordinator, identifier, description, config_entry + ) + for identifier in coordinator.api.identifiers + for description in BINARY_SENSOR_DESCRIPTIONS + ) + + +class PterodactylBinarySensorEntity(PterodactylEntity, BinarySensorEntity): + """Representation of a Pterodactyl binary sensor base entity.""" + + def __init__( + self, + coordinator: PterodactylCoordinator, + identifier: str, + description: BinarySensorEntityDescription, + config_entry: PterodactylConfigEntry, + ) -> None: + """Initialize binary sensor base entity.""" + super().__init__(coordinator, identifier, config_entry) + self.entity_description = description + self._attr_unique_id = f"{self.game_server_data.uuid}_{description.key}" + + @property + def is_on(self) -> bool: + """Return binary sensor state.""" + return self.game_server_data.state == "running" diff --git a/homeassistant/components/pterodactyl/config_flow.py b/homeassistant/components/pterodactyl/config_flow.py new file mode 100644 index 00000000000..a36069d2bb9 --- /dev/null +++ b/homeassistant/components/pterodactyl/config_flow.py @@ -0,0 +1,62 @@ +"""Config flow for the Pterodactyl integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +import voluptuous as vol +from yarl import URL + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_API_KEY, CONF_URL + +from .api import ( + PterodactylAPI, + PterodactylConfigurationError, + PterodactylConnectionError, +) +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_URL = "http://localhost:8080" + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_URL, default=DEFAULT_URL): str, + vol.Required(CONF_API_KEY): str, + } +) + + +class PterodactylConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Pterodactyl.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input is not None: + url = URL(user_input[CONF_URL]).human_repr() + api_key = user_input[CONF_API_KEY] + + self._async_abort_entries_match({CONF_URL: url}) + api = PterodactylAPI(self.hass, url, api_key) + + try: + await api.async_init() + except (PterodactylConfigurationError, PterodactylConnectionError): + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception occurred during config flow") + errors["base"] = "unknown" + else: + return self.async_create_entry(title=url, data=user_input) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/pterodactyl/const.py b/homeassistant/components/pterodactyl/const.py new file mode 100644 index 00000000000..8cf4d0c3963 --- /dev/null +++ b/homeassistant/components/pterodactyl/const.py @@ -0,0 +1,3 @@ +"""Constants for the Pterodactyl integration.""" + +DOMAIN = "pterodactyl" diff --git a/homeassistant/components/pterodactyl/coordinator.py b/homeassistant/components/pterodactyl/coordinator.py new file mode 100644 index 00000000000..36456ade630 --- /dev/null +++ 
b/homeassistant/components/pterodactyl/coordinator.py @@ -0,0 +1,66 @@ +"""Data update coordinator of the Pterodactyl integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_API_KEY, CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .api import ( + PterodactylAPI, + PterodactylConfigurationError, + PterodactylConnectionError, + PterodactylData, +) + +SCAN_INTERVAL = timedelta(seconds=60) + +_LOGGER = logging.getLogger(__name__) + +type PterodactylConfigEntry = ConfigEntry[PterodactylCoordinator] + + +class PterodactylCoordinator(DataUpdateCoordinator[dict[str, PterodactylData]]): + """Pterodactyl data update coordinator.""" + + config_entry: PterodactylConfigEntry + api: PterodactylAPI + + def __init__( + self, + hass: HomeAssistant, + config_entry: PterodactylConfigEntry, + ) -> None: + """Initialize coordinator instance.""" + + super().__init__( + hass=hass, + name=config_entry.data[CONF_URL], + config_entry=config_entry, + logger=_LOGGER, + update_interval=SCAN_INTERVAL, + ) + + async def _async_setup(self) -> None: + """Set up the Pterodactyl data coordinator.""" + self.api = PterodactylAPI( + hass=self.hass, + host=self.config_entry.data[CONF_URL], + api_key=self.config_entry.data[CONF_API_KEY], + ) + + try: + await self.api.async_init() + except PterodactylConfigurationError as error: + raise UpdateFailed(error) from error + + async def _async_update_data(self) -> dict[str, PterodactylData]: + """Get updated data from the Pterodactyl server.""" + try: + return await self.api.async_get_data() + except PterodactylConnectionError as error: + raise UpdateFailed(error) from error diff --git a/homeassistant/components/pterodactyl/entity.py b/homeassistant/components/pterodactyl/entity.py new file mode 100644 index 00000000000..49fd65af476 --- /dev/null +++ b/homeassistant/components/pterodactyl/entity.py @@ -0,0 +1,47 @@ +"""Base entity for the Pterodactyl integration.""" + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_URL +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .api import PterodactylData +from .const import DOMAIN +from .coordinator import PterodactylCoordinator + +MANUFACTURER = "Pterodactyl" + + +class PterodactylEntity(CoordinatorEntity[PterodactylCoordinator]): + """Representation of a Pterodactyl base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: PterodactylCoordinator, + identifier: str, + config_entry: ConfigEntry, + ) -> None: + """Initialize base entity.""" + super().__init__(coordinator) + + self.identifier = identifier + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, identifier)}, + manufacturer=MANUFACTURER, + name=self.game_server_data.name, + model=self.game_server_data.name, + model_id=self.game_server_data.uuid, + configuration_url=f"{config_entry.data[CONF_URL]}/server/{identifier}", + ) + + @property + def available(self) -> bool: + """Return binary sensor availability.""" + return super().available and self.identifier in self.coordinator.data + + @property + def game_server_data(self) -> PterodactylData: + """Return game server data.""" + return self.coordinator.data[self.identifier] diff --git 
a/homeassistant/components/pterodactyl/manifest.json b/homeassistant/components/pterodactyl/manifest.json new file mode 100644 index 00000000000..8ffa21dd186 --- /dev/null +++ b/homeassistant/components/pterodactyl/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "pterodactyl", + "name": "Pterodactyl", + "codeowners": ["@elmurato"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/pterodactyl", + "iot_class": "local_polling", + "quality_scale": "bronze", + "requirements": ["py-dactyl==2.0.4"] +} diff --git a/homeassistant/components/pterodactyl/quality_scale.yaml b/homeassistant/components/pterodactyl/quality_scale.yaml new file mode 100644 index 00000000000..dae3b9fa11a --- /dev/null +++ b/homeassistant/components/pterodactyl/quality_scale.yaml @@ -0,0 +1,93 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: Integration doesn't provide any service actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow: done + config-flow-test-coverage: done + dependency-transparency: done + docs-actions: + status: exempt + comment: Integration doesn't provide any service actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: done + comment: Handled by coordinator. + entity-unique-id: + status: done + comment: Using config entry ID as the dependency pydactyl doesn't provide unique information. + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: + status: done + comment: | + Raising ConfigEntryNotReady if the initialization isn't successful. + unique-config-entry: + status: done + comment: | + As there is no unique information available from the dependency pydactyl, + the server host is used to identify that the same service is already configured. + + # Silver + action-exceptions: + status: exempt + comment: Integration doesn't provide any service actions. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: Integration doesn't support any configuration parameters. + docs-installation-parameters: todo + entity-unavailable: + status: done + comment: Handled by coordinator. + integration-owner: done + log-when-unavailable: + status: done + comment: Handled by coordinator. + parallel-updates: done + reauthentication-flow: todo + test-coverage: todo + + # Gold + devices: done + diagnostics: todo + discovery: + status: exempt + comment: No discovery possible. + discovery-update-info: + status: exempt + comment: | + No discovery possible. Users can use the (local or public) hostname instead of an IP address, + if static IP addresses cannot be configured. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: todo + entity-translations: done + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: No repair use-cases for this integration. + stale-devices: todo + + # Platinum + async-dependency: todo + inject-websession: + status: exempt + comment: Integration isn't making any HTTP requests.
+ strict-typing: todo diff --git a/homeassistant/components/pterodactyl/strings.json b/homeassistant/components/pterodactyl/strings.json new file mode 100644 index 00000000000..a875c72ccd8 --- /dev/null +++ b/homeassistant/components/pterodactyl/strings.json @@ -0,0 +1,30 @@ +{ + "config": { + "step": { + "user": { + "data": { + "url": "[%key:common::config_flow::data::url%]", + "api_key": "[%key:common::config_flow::data::api_key%]" + }, + "data_description": { + "url": "The URL of your Pterodactyl server, including the protocol (http:// or https://) and optionally the port number.", + "api_key": "The account API key for accessing your Pterodactyl server." + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + } + }, + "entity": { + "binary_sensor": { + "status": { + "name": "Status" + } + } + } +} diff --git a/homeassistant/components/pvoutput/manifest.json b/homeassistant/components/pvoutput/manifest.json index 9dbdad53bcb..dee5f9cda6e 100644 --- a/homeassistant/components/pvoutput/manifest.json +++ b/homeassistant/components/pvoutput/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/pvoutput", "integration_type": "device", "iot_class": "cloud_polling", - "requirements": ["pvo==2.2.0"] + "requirements": ["pvo==2.2.1"] } diff --git a/homeassistant/components/pvoutput/strings.json b/homeassistant/components/pvoutput/strings.json index 06d98971053..651bb55a2b4 100644 --- a/homeassistant/components/pvoutput/strings.json +++ b/homeassistant/components/pvoutput/strings.json @@ -27,19 +27,19 @@ "entity": { "sensor": { "energy_consumption": { - "name": "Energy consumed" + "name": "Energy consumption" }, "energy_generation": { - "name": "Energy generated" + "name": "Energy generation" }, "efficiency": { "name": "Efficiency" }, "power_consumption": { - "name": "Power consumed" + "name": "Power consumption" }, "power_generation": { - "name": "Power generated" + "name": "Power generation" } } } diff --git a/homeassistant/components/pyload/coordinator.py b/homeassistant/components/pyload/coordinator.py index c57dfa7720d..7bb2b870520 100644 --- a/homeassistant/components/pyload/coordinator.py +++ b/homeassistant/components/pyload/coordinator.py @@ -31,6 +31,7 @@ class PyLoadData: download: bool reconnect: bool captcha: bool | None = None + proxy: bool | None = None free_space: int diff --git a/homeassistant/components/qbus/climate.py b/homeassistant/components/qbus/climate.py new file mode 100644 index 00000000000..57d97c046b7 --- /dev/null +++ b/homeassistant/components/qbus/climate.py @@ -0,0 +1,172 @@ +"""Support for Qbus thermostat.""" + +import logging +from typing import Any + +from qbusmqttapi.const import KEY_PROPERTIES_REGIME, KEY_PROPERTIES_SET_TEMPERATURE +from qbusmqttapi.discovery import QbusMqttOutput +from qbusmqttapi.state import QbusMqttThermoState, StateType + +from homeassistant.components.climate import ( + ClimateEntity, + ClimateEntityFeature, + HVACAction, + HVACMode, +) +from homeassistant.components.mqtt import ReceiveMessage, client as mqtt +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.debounce import Debouncer +from homeassistant.helpers.entity_platform import 
AddConfigEntryEntitiesCallback + +from .const import DOMAIN +from .coordinator import QbusConfigEntry +from .entity import QbusEntity, add_new_outputs + +PARALLEL_UPDATES = 0 + +STATE_REQUEST_DELAY = 2 + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: QbusConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up climate entities.""" + + coordinator = entry.runtime_data + added_outputs: list[QbusMqttOutput] = [] + + def _check_outputs() -> None: + add_new_outputs( + coordinator, + added_outputs, + lambda output: output.type == "thermo", + QbusClimate, + async_add_entities, + ) + + _check_outputs() + entry.async_on_unload(coordinator.async_add_listener(_check_outputs)) + + +class QbusClimate(QbusEntity, ClimateEntity): + """Representation of a Qbus climate entity.""" + + _attr_hvac_modes = [HVACMode.HEAT] + _attr_supported_features = ( + ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.TARGET_TEMPERATURE + ) + _attr_temperature_unit = UnitOfTemperature.CELSIUS + + def __init__(self, mqtt_output: QbusMqttOutput) -> None: + """Initialize climate entity.""" + + super().__init__(mqtt_output) + + self._attr_hvac_action = HVACAction.IDLE + self._attr_hvac_mode = HVACMode.HEAT + + set_temp: dict[str, Any] = mqtt_output.properties.get( + KEY_PROPERTIES_SET_TEMPERATURE, {} + ) + current_regime: dict[str, Any] = mqtt_output.properties.get( + KEY_PROPERTIES_REGIME, {} + ) + + self._attr_min_temp: float = set_temp.get("min", 0) + self._attr_max_temp: float = set_temp.get("max", 35) + self._attr_target_temperature_step: float = set_temp.get("step", 0.5) + self._attr_preset_modes: list[str] = current_regime.get("enumValues", []) + self._attr_preset_mode: str = ( + self._attr_preset_modes[0] if len(self._attr_preset_modes) > 0 else "" + ) + + self._request_state_debouncer: Debouncer | None = None + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + self._request_state_debouncer = Debouncer( + self.hass, + _LOGGER, + cooldown=STATE_REQUEST_DELAY, + immediate=False, + function=self._async_request_state, + ) + await super().async_added_to_hass() + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set new target preset mode.""" + + if preset_mode not in self._attr_preset_modes: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_preset", + translation_placeholders={ + "preset": preset_mode, + "options": ", ".join(self._attr_preset_modes), + }, + ) + + state = QbusMqttThermoState(id=self._mqtt_output.id, type=StateType.STATE) + state.write_regime(preset_mode) + + await self._async_publish_output_state(state) + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + temperature = kwargs.get(ATTR_TEMPERATURE) + + if temperature is not None and isinstance(temperature, float): + state = QbusMqttThermoState(id=self._mqtt_output.id, type=StateType.STATE) + state.write_set_temperature(temperature) + + await self._async_publish_output_state(state) + + async def _state_received(self, msg: ReceiveMessage) -> None: + state = self._message_factory.parse_output_state( + QbusMqttThermoState, msg.payload + ) + + if state is None: + return + + if preset_mode := state.read_regime(): + self._attr_preset_mode = preset_mode + + if current_temperature := state.read_current_temperature(): + self._attr_current_temperature = current_temperature + + if target_temperature := 
state.read_set_temperature(): + self._attr_target_temperature = target_temperature + + self._set_hvac_action() + + # When the state type is "event", the payload only contains the changed + # property. Request the state to get the full payload. However, changing + # temperature step by step could cause a flood of state requests, so we're + # holding off a few seconds before requesting the full state. + if state.type == StateType.EVENT: + assert self._request_state_debouncer is not None + await self._request_state_debouncer.async_call() + + self.async_schedule_update_ha_state() + + def _set_hvac_action(self) -> None: + if self.target_temperature is None or self.current_temperature is None: + self._attr_hvac_action = HVACAction.IDLE + return + + self._attr_hvac_action = ( + HVACAction.HEATING + if self.target_temperature > self.current_temperature + else HVACAction.IDLE + ) + + async def _async_request_state(self) -> None: + request = self._message_factory.create_state_request([self._mqtt_output.id]) + await mqtt.async_publish(self.hass, request.topic, request.payload) diff --git a/homeassistant/components/qbus/const.py b/homeassistant/components/qbus/const.py index b9e42f13766..767a41f48cc 100644 --- a/homeassistant/components/qbus/const.py +++ b/homeassistant/components/qbus/const.py @@ -6,6 +6,7 @@ from homeassistant.const import Platform DOMAIN: Final = "qbus" PLATFORMS: list[Platform] = [ + Platform.CLIMATE, Platform.LIGHT, Platform.SWITCH, ] diff --git a/homeassistant/components/qbus/light.py b/homeassistant/components/qbus/light.py index 5ec76f5e807..3d2c763b8e3 100644 --- a/homeassistant/components/qbus/light.py +++ b/homeassistant/components/qbus/light.py @@ -51,7 +51,7 @@ class QbusLight(QbusEntity, LightEntity): super().__init__(mqtt_output) - self._set_state() + self._set_state(0) async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" @@ -74,7 +74,6 @@ class QbusLight(QbusEntity, LightEntity): state.write_percentage(percentage) await self._async_publish_output_state(state) - self._set_state(percentage=percentage, on=on) async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" @@ -82,7 +81,6 @@ class QbusLight(QbusEntity, LightEntity): state.write_on_off(on=False) await self._async_publish_output_state(state) - self._set_state(on=False) async def _state_received(self, msg: ReceiveMessage) -> None: output = self._message_factory.parse_output_state( @@ -91,20 +89,9 @@ class QbusLight(QbusEntity, LightEntity): if output is not None: percentage = round(output.read_percentage()) - self._set_state(percentage=percentage) + self._set_state(percentage) self.async_schedule_update_ha_state() - def _set_state( - self, *, percentage: int | None = None, on: bool | None = None - ) -> None: - if percentage is None: - # When turning on without brightness, we don't know the desired - # brightness. It will be set during _state_received(). 
- if on is True: - self._attr_is_on = True - else: - self._attr_is_on = False - self._attr_brightness = 0 - else: - self._attr_is_on = percentage > 0 - self._attr_brightness = value_to_brightness((1, 100), percentage) + def _set_state(self, percentage: int = 0) -> None: + self._attr_is_on = percentage > 0 + self._attr_brightness = value_to_brightness((1, 100), percentage) diff --git a/homeassistant/components/qbus/strings.json b/homeassistant/components/qbus/strings.json index e6df18c393c..f308c5b3519 100644 --- a/homeassistant/components/qbus/strings.json +++ b/homeassistant/components/qbus/strings.json @@ -15,5 +15,10 @@ "error": { "no_controller": "No controllers were found" } + }, + "exceptions": { + "invalid_preset": { + "message": "Preset mode \"{preset}\" is not valid. Valid preset modes are: {options}." + } } } diff --git a/homeassistant/components/qbus/switch.py b/homeassistant/components/qbus/switch.py index 002ad43e904..e1feccf4450 100644 --- a/homeassistant/components/qbus/switch.py +++ b/homeassistant/components/qbus/switch.py @@ -57,7 +57,6 @@ class QbusSwitch(QbusEntity, SwitchEntity): state.write_value(True) await self._async_publish_output_state(state) - self._attr_is_on = True async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" @@ -65,7 +64,6 @@ class QbusSwitch(QbusEntity, SwitchEntity): state.write_value(False) await self._async_publish_output_state(state) - self._attr_is_on = False async def _state_received(self, msg: ReceiveMessage) -> None: output = self._message_factory.parse_output_state( diff --git a/homeassistant/components/qnap/config_flow.py b/homeassistant/components/qnap/config_flow.py index 75f41a27f69..504883b55e9 100644 --- a/homeassistant/components/qnap/config_flow.py +++ b/homeassistant/components/qnap/config_flow.py @@ -70,8 +70,8 @@ class QnapConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except TypeError: errors["base"] = "invalid_auth" - except Exception as error: # noqa: BLE001 - _LOGGER.error(error) + except Exception: + _LOGGER.exception("Unexpected error") errors["base"] = "unknown" else: unique_id = stats["system"]["serial_number"] diff --git a/homeassistant/components/qnap/coordinator.py b/homeassistant/components/qnap/coordinator.py index 297f6569d2b..a6d654ddbbd 100644 --- a/homeassistant/components/qnap/coordinator.py +++ b/homeassistant/components/qnap/coordinator.py @@ -2,11 +2,13 @@ from __future__ import annotations +from contextlib import contextmanager, nullcontext from datetime import timedelta import logging from typing import Any from qnapstats import QNAPStats +import urllib3 from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -28,6 +30,17 @@ UPDATE_INTERVAL = timedelta(minutes=1) _LOGGER = logging.getLogger(__name__) +@contextmanager +def suppress_insecure_request_warning(): + """Context manager to suppress InsecureRequestWarning. + + Was added in here to solve the following issue, not being solved upstream. 
+ https://github.com/colinodell/python-qnapstats/issues/96 + """ + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + yield + + class QnapCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]): """Custom coordinator for the qnap integration.""" @@ -42,24 +55,31 @@ class QnapCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]): ) protocol = "https" if config_entry.data[CONF_SSL] else "http" + self._verify_ssl = config_entry.data.get(CONF_VERIFY_SSL) + self._api = QNAPStats( f"{protocol}://{config_entry.data.get(CONF_HOST)}", config_entry.data.get(CONF_PORT), config_entry.data.get(CONF_USERNAME), config_entry.data.get(CONF_PASSWORD), - verify_ssl=config_entry.data.get(CONF_VERIFY_SSL), + verify_ssl=self._verify_ssl, timeout=config_entry.data.get(CONF_TIMEOUT), ) def _sync_update(self) -> dict[str, dict[str, Any]]: """Get the latest data from the Qnap API.""" - return { - "system_stats": self._api.get_system_stats(), - "system_health": self._api.get_system_health(), - "smart_drive_health": self._api.get_smart_disk_health(), - "volumes": self._api.get_volumes(), - "bandwidth": self._api.get_bandwidth(), - } + with ( + suppress_insecure_request_warning() + if not self._verify_ssl + else nullcontext() + ): + return { + "system_stats": self._api.get_system_stats(), + "system_health": self._api.get_system_health(), + "smart_drive_health": self._api.get_smart_disk_health(), + "volumes": self._api.get_volumes(), + "bandwidth": self._api.get_bandwidth(), + } async def _async_update_data(self) -> dict[str, dict[str, Any]]: """Get the latest data from the Qnap API.""" diff --git a/homeassistant/components/rabbitair/config_flow.py b/homeassistant/components/rabbitair/config_flow.py index f4487a73b58..43959e1e42c 100644 --- a/homeassistant/components/rabbitair/config_flow.py +++ b/homeassistant/components/rabbitair/config_flow.py @@ -74,8 +74,8 @@ class RabbitAirConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_host" except TimeoutConnect: errors["base"] = "timeout_connect" - except Exception as err: # noqa: BLE001 - _LOGGER.debug("Unexpected exception: %s", err) + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: user_input[CONF_MAC] = info["mac"] diff --git a/homeassistant/components/rainforest_eagle/sensor.py b/homeassistant/components/rainforest_eagle/sensor.py index 58427b0e5ba..6f4cbf4f02c 100644 --- a/homeassistant/components/rainforest_eagle/sensor.py +++ b/homeassistant/components/rainforest_eagle/sensor.py @@ -59,7 +59,7 @@ async def async_setup_entry( coordinator, SensorEntityDescription( key="zigbee:Price", - translation_key="meter_price", + translation_key="energy_price", native_unit_of_measurement=f"{coordinator.data['zigbee:PriceCurrency']}/{UnitOfEnergy.KILO_WATT_HOUR}", state_class=SensorStateClass.MEASUREMENT, ), diff --git a/homeassistant/components/rainforest_eagle/strings.json b/homeassistant/components/rainforest_eagle/strings.json index 7b5054bfb0f..08e237d5af0 100644 --- a/homeassistant/components/rainforest_eagle/strings.json +++ b/homeassistant/components/rainforest_eagle/strings.json @@ -5,7 +5,7 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "cloud_id": "Cloud ID", - "install_code": "Installation Code" + "install_code": "Installation code" }, "data_description": { "host": "The hostname or IP address of your Rainforest gateway."
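The _sync_update change above enters either the warning-suppressing context manager or contextlib.nullcontext depending on verify_ssl. A self-contained sketch of that conditional context-manager pattern, with a suppress-and-restore variant (assumes urllib3 is installed; fetch_stats and its return value are invented for illustration):

from contextlib import contextmanager, nullcontext
import warnings

import urllib3


@contextmanager
def suppress_insecure_request_warning():
    """Temporarily ignore urllib3's InsecureRequestWarning, restoring filters on exit."""
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", urllib3.exceptions.InsecureRequestWarning)
        yield


def fetch_stats(verify_ssl: bool) -> dict:
    # Suppress the warning only for unverified HTTPS; otherwise add no behavior.
    with suppress_insecure_request_warning() if not verify_ssl else nullcontext():
        # ...blocking HTTP calls against the NAS would go here...
        return {"status": "ok"}


print(fetch_stats(verify_ssl=False))  # -> {'status': 'ok'}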
@@ -24,16 +24,16 @@ "entity": { "sensor": { "power_demand": { - "name": "Meter power demand" + "name": "Power demand" }, "total_energy_delivered": { - "name": "Total meter energy delivered" + "name": "Total energy delivered" }, "total_energy_received": { - "name": "Total meter energy received" + "name": "Total energy received" }, - "meter_price": { - "name": "Meter price" + "energy_price": { + "name": "Energy price" } } } diff --git a/homeassistant/components/rainforest_raven/sensor.py b/homeassistant/components/rainforest_raven/sensor.py index 3d358322b70..658689c7e6c 100644 --- a/homeassistant/components/rainforest_raven/sensor.py +++ b/homeassistant/components/rainforest_raven/sensor.py @@ -101,7 +101,7 @@ async def async_setup_entry( coordinator, RAVEnSensorEntityDescription( message_key="PriceCluster", - translation_key="meter_price", + translation_key="energy_price", key="price", native_unit_of_measurement=f"{meter_data['PriceCluster']['currency'].value}/{UnitOfEnergy.KILO_WATT_HOUR}", state_class=SensorStateClass.MEASUREMENT, diff --git a/homeassistant/components/rainforest_raven/strings.json b/homeassistant/components/rainforest_raven/strings.json index fb667d64d3f..bc2653aea87 100644 --- a/homeassistant/components/rainforest_raven/strings.json +++ b/homeassistant/components/rainforest_raven/strings.json @@ -12,7 +12,7 @@ "step": { "meters": { "data": { - "mac": "Meter MAC Addresses" + "mac": "Meter MAC addresses" } }, "user": { @@ -24,27 +24,27 @@ }, "entity": { "sensor": { - "meter_price": { - "name": "Meter price", + "energy_price": { + "name": "Energy price", "state_attributes": { "rate_label": { "name": "Rate" }, "tier": { "name": "Tier" } } }, "power_demand": { - "name": "Meter power demand" + "name": "Power demand" }, "signal_strength": { - "name": "Meter signal strength", + "name": "Signal strength", "state_attributes": { "channel": { "name": "Channel" } } }, "total_energy_delivered": { - "name": "Total meter energy delivered" + "name": "Total energy delivered" }, "total_energy_received": { - "name": "Total meter energy received" + "name": "Total energy received" } } } diff --git a/homeassistant/components/recorder/const.py b/homeassistant/components/recorder/const.py index 36ff63a0496..4797eecda0f 100644 --- a/homeassistant/components/recorder/const.py +++ b/homeassistant/components/recorder/const.py @@ -54,6 +54,7 @@ CONTEXT_ID_AS_BINARY_SCHEMA_VERSION = 36 EVENT_TYPE_IDS_SCHEMA_VERSION = 37 STATES_META_SCHEMA_VERSION = 38 LAST_REPORTED_SCHEMA_VERSION = 43 +CIRCULAR_MEAN_SCHEMA_VERSION = 49 LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION = 28 LEGACY_STATES_EVENT_FOREIGN_KEYS_FIXED_SCHEMA_VERSION = 43 diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 62afa0e7b04..7b8043b9201 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -79,7 +79,13 @@ from .db_schema import ( StatisticsShortTerm, ) from .executor import DBInterruptibleThreadPoolExecutor -from .models import DatabaseEngine, StatisticData, StatisticMetaData, UnsupportedDialect +from .models import ( + DatabaseEngine, + StatisticData, + StatisticMeanType, + StatisticMetaData, + UnsupportedDialect, +) from .pool import POOL_SIZE, MutexPool, RecorderPool from .table_managers.event_data import EventDataManager from .table_managers.event_types import EventTypeManager @@ -611,6 +617,17 @@ class Recorder(threading.Thread): table: type[Statistics | StatisticsShortTerm], ) -> None: """Schedule import of statistics.""" + if 
"mean_type" not in metadata: + # Backwards compatibility for old metadata format + # Can be removed after 2026.4 + metadata["mean_type"] = ( # type: ignore[unreachable] + StatisticMeanType.ARITHMETIC + if metadata.get("has_mean") + else StatisticMeanType.NONE + ) + # Remove deprecated has_mean as it's not needed anymore in core + metadata.pop("has_mean", None) + self.queue_task(ImportStatisticsTask(metadata, stats, table)) @callback diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index bc8fcd1310e..6566cadf64c 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -58,6 +58,7 @@ from .const import ALL_DOMAIN_EXCLUDE_ATTRS, SupportedDialect from .models import ( StatisticData, StatisticDataTimestamp, + StatisticMeanType, StatisticMetaData, bytes_to_ulid_or_none, bytes_to_uuid_hex_or_none, @@ -77,7 +78,7 @@ class LegacyBase(DeclarativeBase): """Base class for tables, used for schema migration.""" -SCHEMA_VERSION = 48 +SCHEMA_VERSION = 50 _LOGGER = logging.getLogger(__name__) @@ -719,6 +720,7 @@ class StatisticsBase: start: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) start_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True) mean: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + mean_weight: Mapped[float | None] = mapped_column(DOUBLE_TYPE) min: Mapped[float | None] = mapped_column(DOUBLE_TYPE) max: Mapped[float | None] = mapped_column(DOUBLE_TYPE) last_reset: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) @@ -740,6 +742,7 @@ class StatisticsBase: start=None, start_ts=stats["start"].timestamp(), mean=stats.get("mean"), + mean_weight=stats.get("mean_weight"), min=stats.get("min"), max=stats.get("max"), last_reset=None, @@ -763,6 +766,7 @@ class StatisticsBase: start=None, start_ts=stats["start_ts"], mean=stats.get("mean"), + mean_weight=stats.get("mean_weight"), min=stats.get("min"), max=stats.get("max"), last_reset=None, @@ -848,6 +852,9 @@ class _StatisticsMeta: has_mean: Mapped[bool | None] = mapped_column(Boolean) has_sum: Mapped[bool | None] = mapped_column(Boolean) name: Mapped[str | None] = mapped_column(String(255)) + mean_type: Mapped[StatisticMeanType] = mapped_column( + SmallInteger, nullable=False, default=StatisticMeanType.NONE.value + ) # See StatisticMeanType @staticmethod def from_meta(meta: StatisticMetaData) -> StatisticsMeta: diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index c5eea0f7088..58af15c2aa7 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -81,7 +81,7 @@ from .db_schema import ( StatisticsRuns, StatisticsShortTerm, ) -from .models import process_timestamp +from .models import StatisticMeanType, process_timestamp from .models.time import datetime_to_timestamp_or_none from .queries import ( batch_cleanup_entity_ids, @@ -144,24 +144,32 @@ class _ColumnTypesForDialect: big_int_type: str timestamp_type: str context_bin_type: str + small_int_type: str + double_type: str _MYSQL_COLUMN_TYPES = _ColumnTypesForDialect( big_int_type="INTEGER(20)", timestamp_type=DOUBLE_PRECISION_TYPE_SQL, context_bin_type=f"BLOB({CONTEXT_ID_BIN_MAX_LENGTH})", + small_int_type="SMALLINT", + double_type=DOUBLE_PRECISION_TYPE_SQL, ) _POSTGRESQL_COLUMN_TYPES = _ColumnTypesForDialect( big_int_type="INTEGER", timestamp_type=DOUBLE_PRECISION_TYPE_SQL, context_bin_type="BYTEA", + 
small_int_type="SMALLINT", + double_type=DOUBLE_PRECISION_TYPE_SQL, ) _SQLITE_COLUMN_TYPES = _ColumnTypesForDialect( big_int_type="INTEGER", timestamp_type="FLOAT", context_bin_type="BLOB", + small_int_type="INTEGER", + double_type="FLOAT", ) _COLUMN_TYPES_FOR_DIALECT: dict[SupportedDialect | None, _ColumnTypesForDialect] = { @@ -1993,6 +2001,42 @@ class _SchemaVersion48Migrator(_SchemaVersionMigrator, target_version=48): _migrate_columns_to_timestamp(self.instance, self.session_maker, self.engine) +class _SchemaVersion49Migrator(_SchemaVersionMigrator, target_version=49): + def _apply_update(self) -> None: + """Version specific update method.""" + _add_columns( + self.session_maker, + "statistics_meta", + [ + f"mean_type {self.column_types.small_int_type} NOT NULL DEFAULT {StatisticMeanType.NONE.value}" + ], + ) + + for table in ("statistics", "statistics_short_term"): + _add_columns( + self.session_maker, + table, + [f"mean_weight {self.column_types.double_type}"], + ) + + with session_scope(session=self.session_maker()) as session: + connection = session.connection() + connection.execute( + text( + "UPDATE statistics_meta SET mean_type=:mean_type WHERE has_mean=true" + ), + {"mean_type": StatisticMeanType.ARITHMETIC.value}, + ) + + +class _SchemaVersion50Migrator(_SchemaVersionMigrator, target_version=50): + def _apply_update(self) -> None: + """Version specific update method.""" + with session_scope(session=self.session_maker()) as session: + connection = session.connection() + connection.execute(text("UPDATE statistics_meta SET has_mean=NULL")) + + def _migrate_statistics_columns_to_timestamp_removing_duplicates( hass: HomeAssistant, instance: Recorder, diff --git a/homeassistant/components/recorder/models/__init__.py b/homeassistant/components/recorder/models/__init__.py index ea7a6c86854..8f76982a900 100644 --- a/homeassistant/components/recorder/models/__init__.py +++ b/homeassistant/components/recorder/models/__init__.py @@ -17,6 +17,7 @@ from .statistics import ( RollingWindowStatisticPeriod, StatisticData, StatisticDataTimestamp, + StatisticMeanType, StatisticMetaData, StatisticPeriod, StatisticResult, @@ -37,6 +38,7 @@ __all__ = [ "RollingWindowStatisticPeriod", "StatisticData", "StatisticDataTimestamp", + "StatisticMeanType", "StatisticMetaData", "StatisticPeriod", "StatisticResult", diff --git a/homeassistant/components/recorder/models/statistics.py b/homeassistant/components/recorder/models/statistics.py index ad4d82067c4..08da12d6b17 100644 --- a/homeassistant/components/recorder/models/statistics.py +++ b/homeassistant/components/recorder/models/statistics.py @@ -3,7 +3,8 @@ from __future__ import annotations from datetime import datetime, timedelta -from typing import Literal, TypedDict +from enum import IntEnum +from typing import Literal, NotRequired, TypedDict class StatisticResult(TypedDict): @@ -36,6 +37,7 @@ class StatisticMixIn(TypedDict, total=False): min: float max: float mean: float + mean_weight: float class StatisticData(StatisticDataBase, StatisticMixIn, total=False): @@ -50,10 +52,20 @@ class StatisticDataTimestamp(StatisticDataTimestampBase, StatisticMixIn, total=F last_reset_ts: float | None +class StatisticMeanType(IntEnum): + """Statistic mean type.""" + + NONE = 0 + ARITHMETIC = 1 + CIRCULAR = 2 + + class StatisticMetaData(TypedDict): """Statistic meta data class.""" - has_mean: bool + # has_mean is deprecated, use mean_type instead. 
has_mean will be removed in 2026.4 + has_mean: NotRequired[bool] + mean_type: StatisticMeanType has_sum: bool name: str | None source: str diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index e26a69c0db9..2507a66899e 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -9,12 +9,23 @@ from datetime import datetime, timedelta from functools import lru_cache, partial from itertools import chain, groupby import logging +import math from operator import itemgetter import re from time import time as time_time -from typing import TYPE_CHECKING, Any, Literal, TypedDict, cast +from typing import TYPE_CHECKING, Any, Literal, Required, TypedDict, cast -from sqlalchemy import Select, and_, bindparam, func, lambda_stmt, select, text +from sqlalchemy import ( + Label, + Select, + and_, + bindparam, + case, + func, + lambda_stmt, + select, + text, +) from sqlalchemy.engine.row import Row from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm.session import Session @@ -29,6 +40,7 @@ from homeassistant.helpers.singleton import singleton from homeassistant.helpers.typing import UNDEFINED, UndefinedType from homeassistant.util import dt as dt_util from homeassistant.util.collection import chunked_or_all +from homeassistant.util.enum import try_parse_enum from homeassistant.util.unit_conversion import ( AreaConverter, BaseUnitConverter, @@ -74,6 +86,7 @@ from .db_schema import ( from .models import ( StatisticData, StatisticDataTimestamp, + StatisticMeanType, StatisticMetaData, StatisticResult, datetime_to_timestamp_or_none, @@ -113,11 +126,54 @@ QUERY_STATISTICS_SHORT_TERM = ( StatisticsShortTerm.sum, ) + +def query_circular_mean(table: type[StatisticsBase]) -> tuple[Label, Label]: + """Return the sqlalchemy function for circular mean and the mean_weight. + + The result must be taken modulo 360 to normalize it to the range [0, 360). + """ + # Postgres doesn't support modulo for double precision and + # the other dbs return the remainder instead of the modulo, + # meaning negative values are possible. For these reasons + # we need to normalize the result to be in the range [0, 360) + # in Python.
+ # https://en.wikipedia.org/wiki/Circular_mean + radians = func.radians(table.mean) + weight = func.sqrt( + func.power(func.sum(func.sin(radians) * table.mean_weight), 2) + + func.power(func.sum(func.cos(radians) * table.mean_weight), 2) + ) + return ( + func.degrees( + func.atan2(func.sum(func.sin(radians)), func.sum(func.cos(radians))) + ).label("mean"), + weight.label("mean_weight"), + ) + + QUERY_STATISTICS_SUMMARY_MEAN = ( StatisticsShortTerm.metadata_id, - func.avg(StatisticsShortTerm.mean), func.min(StatisticsShortTerm.min), func.max(StatisticsShortTerm.max), + case( + ( + StatisticsMeta.mean_type == StatisticMeanType.ARITHMETIC, + func.avg(StatisticsShortTerm.mean), + ), + ( + StatisticsMeta.mean_type == StatisticMeanType.CIRCULAR, + query_circular_mean(StatisticsShortTerm)[0], + ), + else_=None, + ), + case( + ( + StatisticsMeta.mean_type == StatisticMeanType.CIRCULAR, + query_circular_mean(StatisticsShortTerm)[1], + ), + else_=None, + ), + StatisticsMeta.mean_type, ) QUERY_STATISTICS_SUMMARY_SUM = ( @@ -180,6 +236,24 @@ def mean(values: list[float]) -> float | None: return sum(values) / len(values) +DEG_TO_RAD = math.pi / 180 +RAD_TO_DEG = 180 / math.pi + + +def weighted_circular_mean(values: Iterable[tuple[float, float]]) -> float: + """Return the weighted circular mean of the values.""" + sin_sum = sum(math.sin(x * DEG_TO_RAD) * weight for x, weight in values) + cos_sum = sum(math.cos(x * DEG_TO_RAD) * weight for x, weight in values) + return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360 + + +def circular_mean(values: list[float]) -> float: + """Return the circular mean of the values.""" + sin_sum = sum(math.sin(x * DEG_TO_RAD) for x in values) + cos_sum = sum(math.cos(x * DEG_TO_RAD) for x in values) + return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360 + + _LOGGER = logging.getLogger(__name__) @@ -372,11 +446,19 @@ def _compile_hourly_statistics_summary_mean_stmt( start_time_ts: float, end_time_ts: float ) -> StatementLambdaElement: """Generate the summary mean statement for hourly statistics.""" + # Due to the fact that we support different mean types (see StatisticMeanType), + # we need to join here with the StatisticsMeta table to get the mean type + # and then use a case statement to compute the mean based on the mean type. + # As we use the StatisticsMeta.mean_type in the select case statement we need + # to group by it as well.
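As an illustration of the circular statistics used in this hunk, a small plain-Python example mirroring the weighted_circular_mean helper defined above (sample directions and weights are invented): averaging the two directions 350° and 30° gives 10°, whereas a plain arithmetic mean would give 190°.

import math

DEG_TO_RAD = math.pi / 180
RAD_TO_DEG = 180 / math.pi


def weighted_circular_mean(values: list[tuple[float, float]]) -> float:
    """Weighted circular mean in degrees, normalized to [0, 360)."""
    sin_sum = sum(math.sin(deg * DEG_TO_RAD) * weight for deg, weight in values)
    cos_sum = sum(math.cos(deg * DEG_TO_RAD) * weight for deg, weight in values)
    return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360


samples = [(350.0, 1.0), (30.0, 1.0)]  # two equally weighted wind directions
print(round(weighted_circular_mean(samples)))  # -> 10, not (350 + 30) / 2 = 190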
return lambda_stmt( lambda: select(*QUERY_STATISTICS_SUMMARY_MEAN) .filter(StatisticsShortTerm.start_ts >= start_time_ts) .filter(StatisticsShortTerm.start_ts < end_time_ts) - .group_by(StatisticsShortTerm.metadata_id) + .join( + StatisticsMeta, and_(StatisticsShortTerm.metadata_id == StatisticsMeta.id) + ) + .group_by(StatisticsShortTerm.metadata_id, StatisticsMeta.mean_type) .order_by(StatisticsShortTerm.metadata_id) ) @@ -418,10 +500,17 @@ def _compile_hourly_statistics(session: Session, start: datetime) -> None: if stats: for stat in stats: - metadata_id, _mean, _min, _max = stat + metadata_id, _min, _max, _mean, _mean_weight, _mean_type = stat + if ( + try_parse_enum(StatisticMeanType, _mean_type) + is StatisticMeanType.CIRCULAR + ): + # Normalize the circular mean to be in the range [0, 360) + _mean = _mean % 360 summary[metadata_id] = { "start_ts": start_time_ts, "mean": _mean, + "mean_weight": _mean_weight, "min": _min, "max": _max, } @@ -827,7 +916,7 @@ def _statistic_by_id_from_metadata( "display_unit_of_measurement": get_display_unit( hass, meta["statistic_id"], meta["unit_of_measurement"] ), - "has_mean": meta["has_mean"], + "mean_type": meta["mean_type"], "has_sum": meta["has_sum"], "name": meta["name"], "source": meta["source"], @@ -846,7 +935,9 @@ def _flatten_list_statistic_ids_metadata_result( { "statistic_id": _id, "display_unit_of_measurement": info["display_unit_of_measurement"], - "has_mean": info["has_mean"], + "has_mean": info["mean_type"] + == StatisticMeanType.ARITHMETIC, # Can be removed with 2026.4 + "mean_type": info["mean_type"], "has_sum": info["has_sum"], "name": info.get("name"), "source": info["source"], @@ -901,7 +992,7 @@ def list_statistic_ids( continue result[key] = { "display_unit_of_measurement": meta["unit_of_measurement"], - "has_mean": meta["has_mean"], + "mean_type": meta["mean_type"], "has_sum": meta["has_sum"], "name": meta["name"], "source": meta["source"], @@ -919,6 +1010,7 @@ def _reduce_statistics( period_start_end: Callable[[float], tuple[float, float]], period: timedelta, types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], + metadata: dict[str, tuple[int, StatisticMetaData]], ) -> dict[str, list[StatisticsRow]]: """Reduce hourly statistics to daily or monthly statistics.""" result: dict[str, list[StatisticsRow]] = defaultdict(list) @@ -946,7 +1038,13 @@ def _reduce_statistics( "end": end, } if _want_mean: - row["mean"] = mean(mean_values) if mean_values else None + row["mean"] = None + if mean_values: + match metadata[statistic_id][1]["mean_type"]: + case StatisticMeanType.ARITHMETIC: + row["mean"] = mean(mean_values) + case StatisticMeanType.CIRCULAR: + row["mean"] = circular_mean(mean_values) mean_values.clear() if _want_min: row["min"] = min(min_values) if min_values else None @@ -963,8 +1061,9 @@ def _reduce_statistics( result[statistic_id].append(row) if _want_max and (_max := statistic.get("max")) is not None: max_values.append(_max) - if _want_mean and (_mean := statistic.get("mean")) is not None: - mean_values.append(_mean) + if _want_mean: + if (_mean := statistic.get("mean")) is not None: + mean_values.append(_mean) if _want_min and (_min := statistic.get("min")) is not None: min_values.append(_min) prev_stat = statistic @@ -1011,11 +1110,12 @@ def reduce_day_ts_factory() -> tuple[ def _reduce_statistics_per_day( stats: dict[str, list[StatisticsRow]], types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], + metadata: dict[str, tuple[int, StatisticMetaData]], ) -> dict[str, 
list[StatisticsRow]]: """Reduce hourly statistics to daily statistics.""" _same_day_ts, _day_start_end_ts = reduce_day_ts_factory() return _reduce_statistics( - stats, _same_day_ts, _day_start_end_ts, timedelta(days=1), types + stats, _same_day_ts, _day_start_end_ts, timedelta(days=1), types, metadata ) @@ -1059,11 +1159,12 @@ def reduce_week_ts_factory() -> tuple[ def _reduce_statistics_per_week( stats: dict[str, list[StatisticsRow]], types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], + metadata: dict[str, tuple[int, StatisticMetaData]], ) -> dict[str, list[StatisticsRow]]: """Reduce hourly statistics to weekly statistics.""" _same_week_ts, _week_start_end_ts = reduce_week_ts_factory() return _reduce_statistics( - stats, _same_week_ts, _week_start_end_ts, timedelta(days=7), types + stats, _same_week_ts, _week_start_end_ts, timedelta(days=7), types, metadata ) @@ -1112,11 +1213,12 @@ def reduce_month_ts_factory() -> tuple[ def _reduce_statistics_per_month( stats: dict[str, list[StatisticsRow]], types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], + metadata: dict[str, tuple[int, StatisticMetaData]], ) -> dict[str, list[StatisticsRow]]: """Reduce hourly statistics to monthly statistics.""" _same_month_ts, _month_start_end_ts = reduce_month_ts_factory() return _reduce_statistics( - stats, _same_month_ts, _month_start_end_ts, timedelta(days=31), types + stats, _same_month_ts, _month_start_end_ts, timedelta(days=31), types, metadata ) @@ -1160,27 +1262,41 @@ def _generate_max_mean_min_statistic_in_sub_period_stmt( return stmt +class _MaxMinMeanStatisticSubPeriod(TypedDict, total=False): + max: float + mean_acc: float + min: float + duration: float + circular_means: Required[list[tuple[float, float]]] + + def _get_max_mean_min_statistic_in_sub_period( session: Session, - result: dict[str, float], + result: _MaxMinMeanStatisticSubPeriod, start_time: datetime | None, end_time: datetime | None, table: type[StatisticsBase], types: set[Literal["max", "mean", "min", "change"]], - metadata_id: int, + metadata: tuple[int, StatisticMetaData], ) -> None: """Return max, mean and min during the period.""" # Calculate max, mean, min + mean_type = metadata[1]["mean_type"] columns = select() if "max" in types: columns = columns.add_columns(func.max(table.max)) if "mean" in types: - columns = columns.add_columns(func.avg(table.mean)) - columns = columns.add_columns(func.count(table.mean)) + match mean_type: + case StatisticMeanType.ARITHMETIC: + columns = columns.add_columns(func.avg(table.mean)) + columns = columns.add_columns(func.count(table.mean)) + case StatisticMeanType.CIRCULAR: + columns = columns.add_columns(*query_circular_mean(table)) if "min" in types: columns = columns.add_columns(func.min(table.min)) + stmt = _generate_max_mean_min_statistic_in_sub_period_stmt( - columns, start_time, end_time, table, metadata_id + columns, start_time, end_time, table, metadata[0] ) stats = cast(Sequence[Row[Any]], execute_stmt_lambda_element(session, stmt)) if not stats: @@ -1188,11 +1304,21 @@ def _get_max_mean_min_statistic_in_sub_period( if "max" in types and (new_max := stats[0].max) is not None: old_max = result.get("max") result["max"] = max(new_max, old_max) if old_max is not None else new_max - if "mean" in types and stats[0].avg is not None: + if "mean" in types: # https://github.com/sqlalchemy/sqlalchemy/issues/9127 - duration = stats[0].count * table.duration.total_seconds() # type: ignore[operator] - result["duration"] = result.get("duration", 0.0) + duration 
- result["mean_acc"] = result.get("mean_acc", 0.0) + stats[0].avg * duration + match mean_type: + case StatisticMeanType.ARITHMETIC: + duration = stats[0].count * table.duration.total_seconds() # type: ignore[operator] + if stats[0].avg is not None: + result["duration"] = result.get("duration", 0.0) + duration + result["mean_acc"] = ( + result.get("mean_acc", 0.0) + stats[0].avg * duration + ) + case StatisticMeanType.CIRCULAR: + if (new_circular_mean := stats[0].mean) is not None and ( + weight := stats[0].mean_weight + ) is not None: + result["circular_means"].append((new_circular_mean, weight)) if "min" in types and (new_min := stats[0].min) is not None: old_min = result.get("min") result["min"] = min(new_min, old_min) if old_min is not None else new_min @@ -1207,15 +1333,15 @@ def _get_max_mean_min_statistic( tail_start_time: datetime | None, tail_end_time: datetime | None, tail_only: bool, - metadata_id: int, + metadata: tuple[int, StatisticMetaData], types: set[Literal["max", "mean", "min", "change"]], ) -> dict[str, float | None]: """Return max, mean and min during the period. - The mean is a time weighted average, combining hourly and 5-minute statistics if + The mean is time weighted, combining hourly and 5-minute statistics if necessary. """ - max_mean_min: dict[str, float] = {} + max_mean_min = _MaxMinMeanStatisticSubPeriod(circular_means=[]) result: dict[str, float | None] = {} if tail_start_time is not None: @@ -1227,7 +1353,7 @@ def _get_max_mean_min_statistic( tail_end_time, StatisticsShortTerm, types, - metadata_id, + metadata, ) if not tail_only: @@ -1238,7 +1364,7 @@ def _get_max_mean_min_statistic( main_end_time, Statistics, types, - metadata_id, + metadata, ) if head_start_time is not None: @@ -1249,16 +1375,23 @@ def _get_max_mean_min_statistic( head_end_time, StatisticsShortTerm, types, - metadata_id, + metadata, ) if "max" in types: result["max"] = max_mean_min.get("max") if "mean" in types: - if "mean_acc" not in max_mean_min: - result["mean"] = None - else: - result["mean"] = max_mean_min["mean_acc"] / max_mean_min["duration"] + mean_value = None + match metadata[1]["mean_type"]: + case StatisticMeanType.CIRCULAR: + if circular_means := max_mean_min["circular_means"]: + mean_value = weighted_circular_mean(circular_means) + case StatisticMeanType.ARITHMETIC: + if (mean_value := max_mean_min.get("mean_acc")) is not None and ( + duration := max_mean_min.get("duration") + ) is not None: + mean_value = mean_value / duration + result["mean"] = mean_value if "min" in types: result["min"] = max_mean_min.get("min") return result @@ -1559,7 +1692,7 @@ def statistic_during_period( tail_start_time, tail_end_time, tail_only, - metadata_id, + metadata, types, ) @@ -1642,7 +1775,7 @@ def _extract_metadata_and_discard_impossible_columns( has_sum = False for metadata_id, stats_metadata in metadata.values(): metadata_ids.append(metadata_id) - has_mean |= stats_metadata["has_mean"] + has_mean |= stats_metadata["mean_type"] is not StatisticMeanType.NONE has_sum |= stats_metadata["has_sum"] if not has_mean: types.discard("mean") @@ -1798,13 +1931,13 @@ def _statistics_during_period_with_session( ) if period == "day": - result = _reduce_statistics_per_day(result, types) + result = _reduce_statistics_per_day(result, types, metadata) if period == "week": - result = _reduce_statistics_per_week(result, types) + result = _reduce_statistics_per_week(result, types, metadata) if period == "month": - result = _reduce_statistics_per_month(result, types) + result = 
_reduce_statistics_per_month(result, types, metadata) if "change" in _types: _augment_result_with_change( diff --git a/homeassistant/components/recorder/table_managers/statistics_meta.py b/homeassistant/components/recorder/table_managers/statistics_meta.py index 77fc34518db..634e9565c12 100644 --- a/homeassistant/components/recorder/table_managers/statistics_meta.py +++ b/homeassistant/components/recorder/table_managers/statistics_meta.py @@ -4,16 +4,18 @@ from __future__ import annotations import logging import threading -from typing import TYPE_CHECKING, Final, Literal +from typing import TYPE_CHECKING, Any, Final, Literal from lru import LRU from sqlalchemy import lambda_stmt, select +from sqlalchemy.orm import InstrumentedAttribute from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import true from sqlalchemy.sql.lambdas import StatementLambdaElement +from ..const import CIRCULAR_MEAN_SCHEMA_VERSION from ..db_schema import StatisticsMeta -from ..models import StatisticMetaData +from ..models import StatisticMeanType, StatisticMetaData from ..util import execute_stmt_lambda_element if TYPE_CHECKING: @@ -28,7 +30,6 @@ QUERY_STATISTIC_META = ( StatisticsMeta.statistic_id, StatisticsMeta.source, StatisticsMeta.unit_of_measurement, - StatisticsMeta.has_mean, StatisticsMeta.has_sum, StatisticsMeta.name, ) @@ -37,24 +38,38 @@ INDEX_ID: Final = 0 INDEX_STATISTIC_ID: Final = 1 INDEX_SOURCE: Final = 2 INDEX_UNIT_OF_MEASUREMENT: Final = 3 -INDEX_HAS_MEAN: Final = 4 -INDEX_HAS_SUM: Final = 5 -INDEX_NAME: Final = 6 +INDEX_HAS_SUM: Final = 4 +INDEX_NAME: Final = 5 +INDEX_MEAN_TYPE: Final = 6 def _generate_get_metadata_stmt( statistic_ids: set[str] | None = None, statistic_type: Literal["mean", "sum"] | None = None, statistic_source: str | None = None, + schema_version: int = 0, ) -> StatementLambdaElement: - """Generate a statement to fetch metadata.""" - stmt = lambda_stmt(lambda: select(*QUERY_STATISTIC_META)) + """Generate a statement to fetch metadata with the passed filters. + + Depending on the schema version, either mean_type (added in version 49) or has_mean column is used. 
+ """ + columns: list[InstrumentedAttribute[Any]] = list(QUERY_STATISTIC_META) + if schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION: + columns.append(StatisticsMeta.mean_type) + else: + columns.append(StatisticsMeta.has_mean) + stmt = lambda_stmt(lambda: select(*columns)) if statistic_ids: stmt += lambda q: q.where(StatisticsMeta.statistic_id.in_(statistic_ids)) if statistic_source is not None: stmt += lambda q: q.where(StatisticsMeta.source == statistic_source) if statistic_type == "mean": - stmt += lambda q: q.where(StatisticsMeta.has_mean == true()) + if schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION: + stmt += lambda q: q.where( + StatisticsMeta.mean_type != StatisticMeanType.NONE + ) + else: + stmt += lambda q: q.where(StatisticsMeta.has_mean == true()) elif statistic_type == "sum": stmt += lambda q: q.where(StatisticsMeta.has_sum == true()) return stmt @@ -100,14 +115,34 @@ class StatisticsMetaManager: for row in execute_stmt_lambda_element( session, _generate_get_metadata_stmt( - statistic_ids, statistic_type, statistic_source + statistic_ids, + statistic_type, + statistic_source, + self.recorder.schema_version, ), orm_rows=False, ): statistic_id = row[INDEX_STATISTIC_ID] row_id = row[INDEX_ID] + if self.recorder.schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION: + try: + mean_type = StatisticMeanType(row[INDEX_MEAN_TYPE]) + except ValueError: + _LOGGER.warning( + "Invalid mean type found for statistic_id: %s, mean_type: %s. Skipping", + statistic_id, + row[INDEX_MEAN_TYPE], + ) + continue + else: + mean_type = ( + StatisticMeanType.ARITHMETIC + if row[INDEX_MEAN_TYPE] + else StatisticMeanType.NONE + ) meta = { - "has_mean": row[INDEX_HAS_MEAN], + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": row[INDEX_HAS_SUM], "name": row[INDEX_NAME], "source": row[INDEX_SOURCE], @@ -157,9 +192,18 @@ class StatisticsMetaManager: This call is not thread-safe and must be called from the recorder thread. """ + if "mean_type" not in new_metadata: + # To maintain backward compatibility after adding 'mean_type' in schema version 49, + # we must still check for its presence. Even though type hints suggest it should always exist, + # custom integrations might omit it, so we need to guard against that. 
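The guard described in the comment above also appears in recorder/core.py and websocket_api.py earlier in this diff; condensed into a self-contained sketch (normalize_metadata and the literal dicts are illustrative, not recorder code):

from enum import IntEnum


class StatisticMeanType(IntEnum):  # as added in recorder/models/statistics.py
    NONE = 0
    ARITHMETIC = 1
    CIRCULAR = 2


def normalize_metadata(metadata: dict) -> dict:
    """Derive mean_type for callers that still set only the deprecated has_mean."""
    if "mean_type" not in metadata:
        metadata["mean_type"] = (
            StatisticMeanType.ARITHMETIC
            if metadata.get("has_mean")
            else StatisticMeanType.NONE
        )
    return metadata


print(normalize_metadata({"has_mean": True})["mean_type"].name)   # -> ARITHMETIC
print(normalize_metadata({"has_mean": False})["mean_type"].name)  # -> NONE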
+ new_metadata["mean_type"] = ( # type: ignore[unreachable] + StatisticMeanType.ARITHMETIC + if new_metadata["has_mean"] + else StatisticMeanType.NONE + ) metadata_id, old_metadata = old_metadata_dict[statistic_id] if not ( - old_metadata["has_mean"] != new_metadata["has_mean"] + old_metadata["mean_type"] != new_metadata["mean_type"] or old_metadata["has_sum"] != new_metadata["has_sum"] or old_metadata["name"] != new_metadata["name"] or old_metadata["unit_of_measurement"] @@ -170,7 +214,7 @@ class StatisticsMetaManager: self._assert_in_recorder_thread() session.query(StatisticsMeta).filter_by(statistic_id=statistic_id).update( { - StatisticsMeta.has_mean: new_metadata["has_mean"], + StatisticsMeta.mean_type: new_metadata["mean_type"], StatisticsMeta.has_sum: new_metadata["has_sum"], StatisticsMeta.name: new_metadata["name"], StatisticsMeta.unit_of_measurement: new_metadata["unit_of_measurement"], diff --git a/homeassistant/components/recorder/websocket_api.py b/homeassistant/components/recorder/websocket_api.py index d23ecab3dac..f4058943971 100644 --- a/homeassistant/components/recorder/websocket_api.py +++ b/homeassistant/components/recorder/websocket_api.py @@ -37,7 +37,7 @@ from homeassistant.util.unit_conversion import ( VolumeFlowRateConverter, ) -from .models import StatisticPeriod +from .models import StatisticMeanType, StatisticPeriod from .statistics import ( STATISTIC_UNIT_TO_UNIT_CONVERTER, async_add_external_statistics, @@ -532,6 +532,10 @@ def ws_import_statistics( ) -> None: """Import statistics.""" metadata = msg["metadata"] + # The WS command will be changed in a follow up PR + metadata["mean_type"] = ( + StatisticMeanType.ARITHMETIC if metadata["has_mean"] else StatisticMeanType.NONE + ) stats = msg["stats"] if valid_entity_id(metadata["statistic_id"]): diff --git a/homeassistant/components/remote_calendar/config_flow.py b/homeassistant/components/remote_calendar/config_flow.py index 03d0e7ea96a..1ceeb7a3937 100644 --- a/homeassistant/components/remote_calendar/config_flow.py +++ b/homeassistant/components/remote_calendar/config_flow.py @@ -42,6 +42,10 @@ class RemoteCalendarConfigFlow(ConfigFlow, domain=DOMAIN): self._async_abort_entries_match( {CONF_CALENDAR_NAME: user_input[CONF_CALENDAR_NAME]} ) + if user_input[CONF_URL].startswith("webcal://"): + user_input[CONF_URL] = user_input[CONF_URL].replace( + "webcal://", "https://", 1 + ) self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]}) client = get_async_client(self.hass) try: diff --git a/homeassistant/components/remote_calendar/manifest.json b/homeassistant/components/remote_calendar/manifest.json index fe17a3d2c34..256f5baf0ff 100644 --- a/homeassistant/components/remote_calendar/manifest.json +++ b/homeassistant/components/remote_calendar/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["ical"], "quality_scale": "silver", - "requirements": ["ical==9.0.1"] + "requirements": ["ical==9.0.3"] } diff --git a/homeassistant/components/renault/config_flow.py b/homeassistant/components/renault/config_flow.py index 70544a5637f..90d2c11613c 100644 --- a/homeassistant/components/renault/config_flow.py +++ b/homeassistant/components/renault/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any import aiohttp @@ -16,6 +17,8 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import CONF_KAMEREON_ACCOUNT_ID, CONF_LOCALE, DOMAIN from .renault_hub import RenaultHub +_LOGGER = 
logging.getLogger(__name__) + USER_SCHEMA = vol.Schema( { vol.Required(CONF_LOCALE): vol.In(AVAILABLE_LOCALES.keys()), @@ -54,7 +57,8 @@ class RenaultFlowHandler(ConfigFlow, domain=DOMAIN): ) except (aiohttp.ClientConnectionError, GigyaException): errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: if login_success: diff --git a/homeassistant/components/renault/strings.json b/homeassistant/components/renault/strings.json index 8649a5c7b47..727e8cf32f1 100644 --- a/homeassistant/components/renault/strings.json +++ b/homeassistant/components/renault/strings.json @@ -118,7 +118,7 @@ "charge_ended": "Charge ended", "waiting_for_current_charge": "Waiting for current charge", "energy_flap_opened": "Energy flap opened", - "charge_in_progress": "Charging", + "charge_in_progress": "[%key:common::state::charging%]", "charge_error": "Not charging or plugged in", "unavailable": "Unavailable" } diff --git a/homeassistant/components/reolink/entity.py b/homeassistant/components/reolink/entity.py index 55ce4ce891e..ec598de663d 100644 --- a/homeassistant/components/reolink/entity.py +++ b/homeassistant/components/reolink/entity.py @@ -178,8 +178,13 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity): else: self._dev_id = f"{self._host.unique_id}_ch{dev_ch}" + connections = set() + if mac := self._host.api.baichuan.mac_address(dev_ch): + connections.add((CONNECTION_NETWORK_MAC, mac)) + self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, self._dev_id)}, + connections=connections, via_device=(DOMAIN, self._host.unique_id), name=self._host.api.camera_name(dev_ch), model=self._host.api.camera_model(dev_ch), diff --git a/homeassistant/components/reolink/icons.json b/homeassistant/components/reolink/icons.json index 00045c4cda2..7d1dba099ed 100644 --- a/homeassistant/components/reolink/icons.json +++ b/homeassistant/components/reolink/icons.json @@ -217,6 +217,21 @@ "ai_animal_sensitivity": { "default": "mdi:paw" }, + "crossline_sensitivity": { + "default": "mdi:fence" + }, + "intrusion_sensitivity": { + "default": "mdi:location-enter" + }, + "linger_sensitivity": { + "default": "mdi:account-switch" + }, + "forgotten_item_sensitivity": { + "default": "mdi:package-variant-closed-plus" + }, + "taken_item_sensitivity": { + "default": "mdi:package-variant-closed-minus" + }, "ai_face_delay": { "default": "mdi:face-recognition" }, @@ -235,6 +250,18 @@ "ai_animal_delay": { "default": "mdi:paw" }, + "intrusion_delay": { + "default": "mdi:location-enter" + }, + "linger_delay": { + "default": "mdi:account-switch" + }, + "forgotten_item_delay": { + "default": "mdi:package-variant-closed-plus" + }, + "taken_item_delay": { + "default": "mdi:package-variant-closed-minus" + }, "auto_quick_reply_time": { "default": "mdi:message-reply-text-outline" }, diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 41cfe1f9ae3..82b9586cccc 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -19,5 +19,5 @@ "iot_class": "local_push", "loggers": ["reolink_aio"], "quality_scale": "platinum", - "requirements": ["reolink-aio==0.12.3"] + "requirements": ["reolink-aio==0.13.0"] } diff --git a/homeassistant/components/reolink/number.py b/homeassistant/components/reolink/number.py index 48382df4cbc..2a6fb740ee0 100644 --- a/homeassistant/components/reolink/number.py +++ 
b/homeassistant/components/reolink/number.py @@ -9,6 +9,7 @@ from typing import Any from reolink_aio.api import Chime, Host from homeassistant.components.number import ( + NumberDeviceClass, NumberEntity, NumberEntityDescription, NumberMode, @@ -44,6 +45,19 @@ class ReolinkNumberEntityDescription( value: Callable[[Host, int], float | None] +@dataclass(frozen=True, kw_only=True) +class ReolinkSmartAINumberEntityDescription( + NumberEntityDescription, + ReolinkChannelEntityDescription, +): + """A class that describes smart AI number entities.""" + + smart_type: str + method: Callable[[Host, int, int, float], Any] + mode: NumberMode = NumberMode.AUTO + value: Callable[[Host, int, int], float | None] + + @dataclass(frozen=True, kw_only=True) class ReolinkHostNumberEntityDescription( NumberEntityDescription, @@ -125,6 +139,7 @@ NUMBER_ENTITIES = ( cmd_key="GetPtzGuard", translation_key="guard_return_time", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, native_min_value=10, @@ -248,6 +263,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_face_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -264,6 +280,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_person_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -280,6 +297,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_vehicle_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -296,6 +314,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_package_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -312,6 +331,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_pet_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -330,6 +350,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_animal_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -346,6 +367,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAutoReply", translation_key="auto_quick_reply_time", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, native_min_value=1, @@ -385,6 +407,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiCfg", translation_key="auto_track_disappear_time", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, native_min_value=1, @@ -400,6 +423,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiCfg", translation_key="auto_track_stop_time", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, 
native_min_value=1, @@ -493,6 +517,168 @@ NUMBER_ENTITIES = ( ), ) +SMART_AI_NUMBER_ENTITIES = ( + ReolinkSmartAINumberEntityDescription( + key="crossline_sensitivity", + smart_type="crossline", + cmd_id=527, + translation_key="crossline_sensitivity", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_crossline"), + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_sensitivity(ch, "crossline", loc) + ), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "crossline", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="intrusion_sensitivity", + smart_type="intrusion", + cmd_id=529, + translation_key="intrusion_sensitivity", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_intrusion"), + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_sensitivity(ch, "intrusion", loc) + ), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "intrusion", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="linger_sensitivity", + smart_type="loitering", + cmd_id=531, + translation_key="linger_sensitivity", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_linger"), + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_sensitivity(ch, "loitering", loc) + ), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "loitering", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="forgotten_item_sensitivity", + smart_type="legacy", + cmd_id=549, + translation_key="forgotten_item_sensitivity", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_forgotten_item"), + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_sensitivity(ch, "legacy", loc) + ), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "legacy", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="taken_item_sensitivity", + smart_type="loss", + cmd_id=551, + translation_key="taken_item_sensitivity", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_taken_item"), + value=lambda api, ch, loc: api.baichuan.smart_ai_sensitivity(ch, "loss", loc), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "loss", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="intrusion_delay", + smart_type="intrusion", + cmd_id=529, + translation_key="intrusion_delay", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, + native_step=1, + native_unit_of_measurement=UnitOfTime.SECONDS, + native_min_value=0, + native_max_value=10, + supported=lambda api, ch: api.supported(ch, "ai_intrusion"), + value=lambda api, ch, loc: api.baichuan.smart_ai_delay(ch, "intrusion", loc), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + 
ch, "intrusion", loc, delay=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="linger_delay", + smart_type="loitering", + cmd_id=531, + translation_key="linger_delay", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + native_step=1, + native_unit_of_measurement=UnitOfTime.SECONDS, + native_min_value=1, + native_max_value=10, + supported=lambda api, ch: api.supported(ch, "ai_linger"), + value=lambda api, ch, loc: api.baichuan.smart_ai_delay(ch, "loitering", loc), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "loitering", loc, delay=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="forgotten_item_delay", + smart_type="legacy", + cmd_id=549, + translation_key="forgotten_item_delay", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, + native_step=1, + native_unit_of_measurement=UnitOfTime.SECONDS, + native_min_value=1, + native_max_value=30, + supported=lambda api, ch: api.supported(ch, "ai_forgotten_item"), + value=lambda api, ch, loc: api.baichuan.smart_ai_delay(ch, "legacy", loc), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "legacy", loc, delay=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="taken_item_delay", + smart_type="loss", + cmd_id=551, + translation_key="taken_item_delay", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, + native_step=1, + native_unit_of_measurement=UnitOfTime.SECONDS, + native_min_value=1, + native_max_value=30, + supported=lambda api, ch: api.supported(ch, "ai_taken_item"), + value=lambda api, ch, loc: api.baichuan.smart_ai_delay(ch, "loss", loc), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "loss", loc, delay=int(value) + ), + ), +) + HOST_NUMBER_ENTITIES = ( ReolinkHostNumberEntityDescription( key="alarm_volume", @@ -542,22 +728,32 @@ async def async_setup_entry( ) -> None: """Set up a Reolink number entities.""" reolink_data: ReolinkData = config_entry.runtime_data + api = reolink_data.host.api entities: list[NumberEntity] = [ ReolinkNumberEntity(reolink_data, channel, entity_description) for entity_description in NUMBER_ENTITIES - for channel in reolink_data.host.api.channels - if entity_description.supported(reolink_data.host.api, channel) + for channel in api.channels + if entity_description.supported(api, channel) ] + entities.extend( + ReolinkSmartAINumberEntity(reolink_data, channel, location, entity_description) + for entity_description in SMART_AI_NUMBER_ENTITIES + for channel in api.channels + for location in api.baichuan.smart_location_list( + channel, entity_description.smart_type + ) + if entity_description.supported(api, channel) + ) entities.extend( ReolinkHostNumberEntity(reolink_data, entity_description) for entity_description in HOST_NUMBER_ENTITIES - if entity_description.supported(reolink_data.host.api) + if entity_description.supported(api) ) entities.extend( ReolinkChimeNumberEntity(reolink_data, chime, entity_description) for entity_description in CHIME_NUMBER_ENTITIES - for chime in reolink_data.host.api.chime_list + for chime in api.chime_list ) async_add_entities(entities) @@ -599,6 +795,51 @@ class ReolinkNumberEntity(ReolinkChannelCoordinatorEntity, NumberEntity): self.async_write_ha_state() +class ReolinkSmartAINumberEntity(ReolinkChannelCoordinatorEntity, NumberEntity): + """Base smart AI number entity class for Reolink 
IP cameras.""" + + entity_description: ReolinkSmartAINumberEntityDescription + + def __init__( + self, + reolink_data: ReolinkData, + channel: int, + location: int, + entity_description: ReolinkSmartAINumberEntityDescription, + ) -> None: + """Initialize Reolink number entity.""" + self.entity_description = entity_description + super().__init__(reolink_data, channel) + + unique_index = self._host.api.baichuan.smart_ai_index( + channel, entity_description.smart_type, location + ) + self._attr_unique_id = f"{self._attr_unique_id}_{unique_index}" + + self._location = location + self._attr_mode = entity_description.mode + self._attr_translation_placeholders = { + "zone_name": self._host.api.baichuan.smart_ai_name( + channel, entity_description.smart_type, location + ) + } + + @property + def native_value(self) -> float | None: + """State of the number entity.""" + return self.entity_description.value( + self._host.api, self._channel, self._location + ) + + @raise_translated_error + async def async_set_native_value(self, value: float) -> None: + """Update the current value.""" + await self.entity_description.method( + self._host.api, self._channel, self._location, value + ) + self.async_write_ha_state() + + class ReolinkHostNumberEntity(ReolinkHostCoordinatorEntity, NumberEntity): """Base number entity class for Reolink Host.""" diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index 7ad2e1ea217..9a6db7b5d67 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -103,6 +103,12 @@ }, "config_entry_not_ready": { "message": "Error while trying to set up {host}: {err}" + }, + "update_already_running": { + "message": "Reolink firmware update already running, wait on completion before starting another" + }, + "firmware_rate_limit": { + "message": "Reolink firmware update server reached hourly rate limit: updating can be tried again in 1 hour" } }, "issues": { @@ -562,6 +568,21 @@ "ai_animal_sensitivity": { "name": "AI animal sensitivity" }, + "crossline_sensitivity": { + "name": "AI crossline {zone_name} sensitivity" + }, + "intrusion_sensitivity": { + "name": "AI intrusion {zone_name} sensitivity" + }, + "linger_sensitivity": { + "name": "AI linger {zone_name} sensitivity" + }, + "forgotten_item_sensitivity": { + "name": "AI item forgotten {zone_name} sensitivity" + }, + "taken_item_sensitivity": { + "name": "AI item taken {zone_name} sensitivity" + }, "ai_face_delay": { "name": "AI face delay" }, @@ -580,6 +601,18 @@ "ai_animal_delay": { "name": "AI animal delay" }, + "intrusion_delay": { + "name": "AI intrusion {zone_name} delay" + }, + "linger_delay": { + "name": "AI linger {zone_name} delay" + }, + "forgotten_item_delay": { + "name": "AI item forgotten {zone_name} delay" + }, + "taken_item_delay": { + "name": "AI item taken {zone_name} delay" + }, "auto_quick_reply_time": { "name": "Auto quick reply time" }, diff --git a/homeassistant/components/reolink/switch.py b/homeassistant/components/reolink/switch.py index 0f106c0f2cc..af87a75eece 100644 --- a/homeassistant/components/reolink/switch.py +++ b/homeassistant/components/reolink/switch.py @@ -162,6 +162,7 @@ SWITCH_ENTITIES = ( ReolinkSwitchEntityDescription( key="manual_record", cmd_key="GetManualRec", + cmd_id=588, translation_key="manual_record", entity_category=EntityCategory.CONFIG, supported=lambda api, ch: api.supported(ch, "manual_record"), diff --git a/homeassistant/components/reolink/update.py 
b/homeassistant/components/reolink/update.py index 0744d66fb5b..a7c883003b7 100644 --- a/homeassistant/components/reolink/update.py +++ b/homeassistant/components/reolink/update.py @@ -31,7 +31,7 @@ from .entity import ( ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error PARALLEL_UPDATES = 0 RESUME_AFTER_INSTALL = 15 @@ -184,6 +184,7 @@ class ReolinkUpdateBaseEntity( f"## Release notes\n\n{new_firmware.release_notes}" ) + @raise_translated_error async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: @@ -196,6 +197,8 @@ class ReolinkUpdateBaseEntity( try: await self._host.api.update_firmware(self._channel) except ReolinkError as err: + if err.translation_key: + raise raise HomeAssistantError( translation_domain=DOMAIN, translation_key="firmware_install_error", diff --git a/homeassistant/components/reolink/util.py b/homeassistant/components/reolink/util.py index a5556b66a33..12b4825caeb 100644 --- a/homeassistant/components/reolink/util.py +++ b/homeassistant/components/reolink/util.py @@ -27,6 +27,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import device_registry as dr from homeassistant.helpers.storage import Store +from homeassistant.helpers.translation import async_get_exception_message from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN @@ -97,6 +98,30 @@ def get_device_uid_and_ch( return (device_uid, ch, is_chime) +def check_translation_key(err: ReolinkError) -> str | None: + """Check if the translation key from the upstream library is present.""" + if not err.translation_key: + return None + if async_get_exception_message(DOMAIN, err.translation_key) == err.translation_key: + # translation key not found in strings.json + return None + return err.translation_key + + +_EXCEPTION_TO_TRANSLATION_KEY = { + ApiError: "api_error", + InvalidContentTypeError: "invalid_content_type", + CredentialsInvalidError: "invalid_credentials", + LoginError: "login_error", + NoDataError: "no_data", + UnexpectedDataError: "unexpected_data", + NotSupportedError: "not_supported", + SubscriptionError: "subscription_error", + ReolinkConnectionError: "connection_error", + ReolinkTimeoutError: "timeout", +} + + # Decorators def raise_translated_error[**P, R]( func: Callable[P, Awaitable[R]], @@ -110,73 +135,14 @@ def raise_translated_error[**P, R]( except InvalidParameterError as err: raise ServiceValidationError( translation_domain=DOMAIN, - translation_key="invalid_parameter", - translation_placeholders={"err": str(err)}, - ) from err - except ApiError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="api_error", - translation_placeholders={"err": str(err)}, - ) from err - except InvalidContentTypeError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="invalid_content_type", - translation_placeholders={"err": str(err)}, - ) from err - except CredentialsInvalidError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="invalid_credentials", - translation_placeholders={"err": str(err)}, - ) from err - except LoginError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="login_error", - translation_placeholders={"err": str(err)}, - ) from err - except 
NoDataError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="no_data", - translation_placeholders={"err": str(err)}, - ) from err - except UnexpectedDataError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="unexpected_data", - translation_placeholders={"err": str(err)}, - ) from err - except NotSupportedError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="not_supported", - translation_placeholders={"err": str(err)}, - ) from err - except SubscriptionError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="subscription_error", - translation_placeholders={"err": str(err)}, - ) from err - except ReolinkConnectionError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="connection_error", - translation_placeholders={"err": str(err)}, - ) from err - except ReolinkTimeoutError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="timeout", + translation_key=check_translation_key(err) or "invalid_parameter", translation_placeholders={"err": str(err)}, ) from err except ReolinkError as err: raise HomeAssistantError( translation_domain=DOMAIN, - translation_key="unexpected", + translation_key=check_translation_key(err) + or _EXCEPTION_TO_TRANSLATION_KEY.get(type(err), "unexpected"), translation_placeholders={"err": str(err)}, ) from err diff --git a/homeassistant/components/rflink/sensor.py b/homeassistant/components/rflink/sensor.py index 027c39da70f..97d0b811509 100644 --- a/homeassistant/components/rflink/sensor.py +++ b/homeassistant/components/rflink/sensor.py @@ -236,7 +236,8 @@ SENSOR_TYPES = ( key="winddirection", name="Wind direction", icon="mdi:compass", - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, native_unit_of_measurement=DEGREE, ), SensorEntityDescription( diff --git a/homeassistant/components/rfxtrx/sensor.py b/homeassistant/components/rfxtrx/sensor.py index 4b256279445..6669b1367df 100644 --- a/homeassistant/components/rfxtrx/sensor.py +++ b/homeassistant/components/rfxtrx/sensor.py @@ -161,7 +161,8 @@ SENSOR_TYPES = ( RfxtrxSensorEntityDescription( key="Wind direction", translation_key="wind_direction", - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, native_unit_of_measurement=DEGREE, ), RfxtrxSensorEntityDescription( diff --git a/homeassistant/components/rfxtrx/strings.json b/homeassistant/components/rfxtrx/strings.json index db4efad5bb4..d0a61540a53 100644 --- a/homeassistant/components/rfxtrx/strings.json +++ b/homeassistant/components/rfxtrx/strings.json @@ -105,15 +105,15 @@ "sound_15": "Sound 15", "down": "Down", "up": "Up", - "all_off": "All Off", - "all_on": "All On", + "all_off": "All off", + "all_on": "All on", "scene": "Scene", - "off": "Off", - "on": "On", + "off": "[%key:common::state::off%]", + "on": "[%key:common::state::on%]", "dim": "Dim", "bright": "Bright", - "all_group_off": "All/group Off", - "all_group_on": "All/group On", + "all_group_off": "All/group off", + "all_group_on": "All/group on", "chime": "Chime", "illegal_command": "Illegal command", "set_level": "Set level", diff --git a/homeassistant/components/roborock/__init__.py b/homeassistant/components/roborock/__init__.py index 8140b58b86c..81b412c6770 100644 --- a/homeassistant/components/roborock/__init__.py +++ 
b/homeassistant/components/roborock/__init__.py @@ -164,6 +164,31 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> return True +async def async_migrate_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> bool: + """Migrate old configuration entries to the new format.""" + _LOGGER.debug( + "Migrating configuration from version %s.%s", + entry.version, + entry.minor_version, + ) + if entry.version > 1: + # Downgrade from future version + return False + + # 1->2: Migrate from unique id as email address to unique id as rruid + if entry.minor_version == 1: + user_data = UserData.from_dict(entry.data[CONF_USER_DATA]) + _LOGGER.debug("Updating unique id to %s", user_data.rruid) + hass.config_entries.async_update_entry( + entry, + unique_id=user_data.rruid, + version=1, + minor_version=2, + ) + + return True + + def build_setup_functions( hass: HomeAssistant, entry: RoborockConfigEntry, diff --git a/homeassistant/components/roborock/config_flow.py b/homeassistant/components/roborock/config_flow.py index 1a359faca10..62943e0dcc9 100644 --- a/homeassistant/components/roborock/config_flow.py +++ b/homeassistant/components/roborock/config_flow.py @@ -48,6 +48,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Roborock.""" VERSION = 1 + MINOR_VERSION = 2 def __init__(self) -> None: """Initialize the config flow.""" @@ -62,8 +63,6 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): if user_input is not None: username = user_input[CONF_USERNAME] - await self.async_set_unique_id(username.lower()) - self._abort_if_unique_id_configured(error="already_configured_account") self._username = username _LOGGER.debug("Requesting code for Roborock account") self._client = RoborockApiClient( @@ -111,7 +110,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): code = user_input[CONF_ENTRY_CODE] _LOGGER.debug("Logging into Roborock account using email provided code") try: - login_data = await self._client.code_login(code) + user_data = await self._client.code_login(code) except RoborockInvalidCode: errors["base"] = "invalid_code" except RoborockException: @@ -121,17 +120,20 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: + await self.async_set_unique_id(user_data.rruid) if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="wrong_account") reauth_entry = self._get_reauth_entry() self.hass.config_entries.async_update_entry( reauth_entry, data={ **reauth_entry.data, - CONF_USER_DATA: login_data.as_dict(), + CONF_USER_DATA: user_data.as_dict(), }, ) return self.async_abort(reason="reauth_successful") - return self._create_entry(self._client, self._username, login_data) + self._abort_if_unique_id_configured(error="already_configured_account") + return self._create_entry(self._client, self._username, user_data) return self.async_show_form( step_id="code", @@ -143,6 +145,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): self, discovery_info: DhcpServiceInfo ) -> ConfigFlowResult: """Handle a flow started by a dhcp discovery.""" + await self._async_handle_discovery_without_unique_id() device_registry = dr.async_get(self.hass) device = device_registry.async_get_device( connections={ diff --git a/homeassistant/components/roborock/manifest.json b/homeassistant/components/roborock/manifest.json index 60036edb0bc..531590d5d6e 100644 --- a/homeassistant/components/roborock/manifest.json +++ 
b/homeassistant/components/roborock/manifest.json @@ -17,6 +17,7 @@ "documentation": "https://www.home-assistant.io/integrations/roborock", "iot_class": "local_polling", "loggers": ["roborock"], + "quality_scale": "silver", "requirements": [ "python-roborock==2.16.1", "vacuum-map-parser-roborock==0.1.2" diff --git a/homeassistant/components/roborock/quality_scale.yaml b/homeassistant/components/roborock/quality_scale.yaml index d064c30ccf6..32ddb145f90 100644 --- a/homeassistant/components/roborock/quality_scale.yaml +++ b/homeassistant/components/roborock/quality_scale.yaml @@ -21,7 +21,7 @@ rules: test-before-setup: done unique-config-entry: done # Silver - action-exceptions: todo + action-exceptions: done config-entry-unloading: done docs-configuration-parameters: done docs-installation-parameters: done @@ -29,7 +29,7 @@ rules: integration-owner: done log-when-unavailable: done parallel-updates: done - reauthentication-flow: todo + reauthentication-flow: done test-coverage: done # Gold devices: done diff --git a/homeassistant/components/roborock/strings.json b/homeassistant/components/roborock/strings.json index caad67e4ce6..4546856ec8b 100644 --- a/homeassistant/components/roborock/strings.json +++ b/homeassistant/components/roborock/strings.json @@ -35,7 +35,8 @@ }, "abort": { "already_configured_account": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "wrong_account": "Wrong account: Please authenticate with the right account." } }, "options": { @@ -338,7 +339,7 @@ "zeo_state": { "name": "State", "state": { - "standby": "Standby", + "standby": "[%key:common::state::standby%]", "weighing": "Weighing", "soaking": "Soaking", "washing": "Washing", diff --git a/homeassistant/components/roku/strings.json b/homeassistant/components/roku/strings.json index 04348bc3bfb..62f1f8b1736 100644 --- a/homeassistant/components/roku/strings.json +++ b/homeassistant/components/roku/strings.json @@ -47,7 +47,7 @@ "name": "Supports AirPlay" }, "supports_ethernet": { - "name": "Supports ethernet" + "name": "Supports Ethernet" }, "supports_find_remote": { "name": "Supports find remote" diff --git a/homeassistant/components/route53/manifest.json b/homeassistant/components/route53/manifest.json index 978c916e3ee..8c21b856b80 100644 --- a/homeassistant/components/route53/manifest.json +++ b/homeassistant/components/route53/manifest.json @@ -6,5 +6,5 @@ "iot_class": "cloud_push", "loggers": ["boto3", "botocore", "s3transfer"], "quality_scale": "legacy", - "requirements": ["boto3==1.34.131"] + "requirements": ["boto3==1.37.1"] } diff --git a/homeassistant/components/schlage/strings.json b/homeassistant/components/schlage/strings.json index 56e72c2d2c0..42bd51de9d0 100644 --- a/homeassistant/components/schlage/strings.json +++ b/homeassistant/components/schlage/strings.json @@ -33,9 +33,9 @@ }, "select": { "auto_lock_time": { - "name": "Auto-Lock time", + "name": "Auto-lock time", "state": { - "0": "Disabled", + "0": "[%key:common::state::disabled%]", "15": "15 seconds", "30": "30 seconds", "60": "1 minute", diff --git a/homeassistant/components/sensibo/strings.json b/homeassistant/components/sensibo/strings.json index 6aba2be52fc..0fbcda461c8 100644 --- a/homeassistant/components/sensibo/strings.json +++ b/homeassistant/components/sensibo/strings.json @@ -330,7 +330,7 @@ "timer_on_switch": { "name": "Timer", "state_attributes": { - 
"id": { "name": "Id" }, + "id": { "name": "ID" }, "turn_on": { "name": "Turns on", "state": { @@ -594,7 +594,7 @@ "issues": { "deprecated_entity_horizontalswing": { "title": "The Sensibo {name} entity is deprecated", - "description": "The Sensibo entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your automations and scripts to use the `horizontal_swing` attribute part of the `climate` entity instead.\n, Disable the `{entity}` and reload the config entry or restart Home Assistant to fix this issue." + "description": "The Sensibo entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your automations and scripts to use the `horizontal_swing` attribute part of the `climate` entity instead.\nDisable `{entity}` and reload the config entry or restart Home Assistant to fix this issue." } } } diff --git a/homeassistant/components/sensor/__init__.py b/homeassistant/components/sensor/__init__.py index e3ee566a855..e06ee85cd03 100644 --- a/homeassistant/components/sensor/__init__.py +++ b/homeassistant/components/sensor/__init__.py @@ -44,6 +44,7 @@ from .const import ( # noqa: F401 DEVICE_CLASSES_SCHEMA, DOMAIN, NON_NUMERIC_DEVICE_CLASSES, + STATE_CLASS_UNITS, STATE_CLASSES, STATE_CLASSES_SCHEMA, UNIT_CONVERTERS, @@ -713,6 +714,18 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): report_issue, ) + # Validate unit of measurement used for sensors with a state class + if ( + state_class + and (units := STATE_CLASS_UNITS.get(state_class)) is not None + and native_unit_of_measurement not in units + ): + raise ValueError( + f"Sensor {self.entity_id} ({type(self)}) is using native unit of " + f"measurement '{native_unit_of_measurement}' which is not a valid unit " + f"for the state class ('{state_class}') it is using; expected one of {units};" + ) + return value def _display_precision_or_none(self) -> int | None: diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index e1f7dd13d93..63af8e5bf52 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -491,6 +491,9 @@ class SensorStateClass(StrEnum): MEASUREMENT = "measurement" """The state represents a measurement in present time.""" + MEASUREMENT_ANGLE = "measurement_angle" + """The state represents a angle measurement in present time. Currently only degrees are supported.""" + TOTAL = "total" """The state represents a total amount. 
@@ -693,6 +696,11 @@ DEVICE_CLASS_STATE_CLASSES: dict[SensorDeviceClass, set[SensorStateClass]] = { SensorStateClass.TOTAL, SensorStateClass.TOTAL_INCREASING, }, - SensorDeviceClass.WIND_DIRECTION: set(), + SensorDeviceClass.WIND_DIRECTION: {SensorStateClass.MEASUREMENT_ANGLE}, SensorDeviceClass.WIND_SPEED: {SensorStateClass.MEASUREMENT}, } + + +STATE_CLASS_UNITS: dict[SensorStateClass | str, set[type[StrEnum] | str | None]] = { + SensorStateClass.MEASUREMENT_ANGLE: {DEGREE}, +} diff --git a/homeassistant/components/sensor/recorder.py b/homeassistant/components/sensor/recorder.py index 4e8e27e0c79..cb80fa7d2ce 100644 --- a/homeassistant/components/sensor/recorder.py +++ b/homeassistant/components/sensor/recorder.py @@ -5,6 +5,7 @@ from __future__ import annotations from collections import defaultdict from collections.abc import Callable, Iterable from contextlib import suppress +from dataclasses import dataclass import datetime import itertools import logging @@ -21,6 +22,7 @@ from homeassistant.components.recorder import ( ) from homeassistant.components.recorder.models import ( StatisticData, + StatisticMeanType, StatisticMetaData, StatisticResult, ) @@ -52,10 +54,22 @@ from .const import ( _LOGGER = logging.getLogger(__name__) + +@dataclass +class _StatisticsConfig: + types: set[str] + mean_type: StatisticMeanType = StatisticMeanType.NONE + + DEFAULT_STATISTICS = { - SensorStateClass.MEASUREMENT: {"mean", "min", "max"}, - SensorStateClass.TOTAL: {"sum"}, - SensorStateClass.TOTAL_INCREASING: {"sum"}, + SensorStateClass.MEASUREMENT: _StatisticsConfig( + {"mean", "min", "max"}, StatisticMeanType.ARITHMETIC + ), + SensorStateClass.MEASUREMENT_ANGLE: _StatisticsConfig( + {"mean"}, StatisticMeanType.CIRCULAR + ), + SensorStateClass.TOTAL: _StatisticsConfig({"sum"}), + SensorStateClass.TOTAL_INCREASING: _StatisticsConfig({"sum"}), } EQUIVALENT_UNITS = { @@ -76,8 +90,15 @@ WARN_NEGATIVE: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_total_increasing_nega # Keep track of entities for which a warning about unsupported unit has been logged WARN_UNSUPPORTED_UNIT: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_unsupported_unit") WARN_UNSTABLE_UNIT: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_unstable_unit") +# Keep track of entities for which a warning about statistics mean algorithm change has been logged +WARN_STATISTICS_MEAN_CHANGED: HassKey[set[str]] = HassKey( + f"{DOMAIN}_warn_statistics_mean_change" +) # Link to dev statistics where issues around LTS can be fixed LINK_DEV_STATISTICS = "https://my.home-assistant.io/redirect/developer_statistics" +STATE_CLASS_REMOVED_ISSUE = "state_class_removed" +UNITS_CHANGED_ISSUE = "units_changed" +MEAN_TYPE_CHANGED_ISSUE = "mean_type_changed" def _get_sensor_states(hass: HomeAssistant) -> list[State]: @@ -99,7 +120,7 @@ def _get_sensor_states(hass: HomeAssistant) -> list[State]: ] -def _time_weighted_average( +def _time_weighted_arithmetic_mean( fstates: list[tuple[float, State]], start: datetime.datetime, end: datetime.datetime ) -> float: """Calculate a time weighted average. @@ -137,6 +158,43 @@ def _time_weighted_average( return accumulated / (end - start).total_seconds() +def _time_weighted_circular_mean( + fstates: list[tuple[float, State]], start: datetime.datetime, end: datetime.datetime +) -> float: + """Calculate a time weighted circular mean. + + The circular mean is calculated by weighting the states by duration in seconds between + state changes. + Note: there's no interpolation of values between state changes. 
+ """ + old_fstate: float | None = None + old_start_time: datetime.datetime | None = None + values: list[tuple[float, float]] = [] + + for fstate, state in fstates: + # The recorder will give us the last known state, which may be well + # before the requested start time for the statistics + start_time = max(state.last_updated, start) + if old_start_time is None: + # Adjust start time, if there was no last known state + start = start_time + else: + duration = (start_time - old_start_time).total_seconds() + assert old_fstate is not None + values.append((old_fstate, duration)) + + old_fstate = fstate + old_start_time = start_time + + if old_fstate is not None: + # Add last value weighted by duration until end of the period + assert old_start_time is not None + duration = (end - old_start_time).total_seconds() + values.append((old_fstate, duration)) + + return statistics.weighted_circular_mean(values) + + def _get_units(fstates: list[tuple[float, State]]) -> set[str | None]: """Return a set of all units.""" return {item[1].attributes.get(ATTR_UNIT_OF_MEASUREMENT) for item in fstates} @@ -362,7 +420,7 @@ def reset_detected( return fstate < 0.9 * previous_fstate -def _wanted_statistics(sensor_states: list[State]) -> dict[str, set[str]]: +def _wanted_statistics(sensor_states: list[State]) -> dict[str, _StatisticsConfig]: """Prepare a dict with wanted statistics for entities.""" return { state.entity_id: DEFAULT_STATISTICS[state.attributes[ATTR_STATE_CLASS]] @@ -406,7 +464,9 @@ def compile_statistics( # noqa: C901 wanted_statistics = _wanted_statistics(sensor_states) # Get history between start and end entities_full_history = [ - i.entity_id for i in sensor_states if "sum" in wanted_statistics[i.entity_id] + i.entity_id + for i in sensor_states + if "sum" in wanted_statistics[i.entity_id].types ] history_list: dict[str, list[State]] = {} if entities_full_history: @@ -421,7 +481,7 @@ def compile_statistics( # noqa: C901 entities_significant_history = [ i.entity_id for i in sensor_states - if "sum" not in wanted_statistics[i.entity_id] + if "sum" not in wanted_statistics[i.entity_id].types ] if entities_significant_history: _history_list = history.get_full_significant_states_with_session( @@ -471,7 +531,7 @@ def compile_statistics( # noqa: C901 continue state_class: str = _state.attributes[ATTR_STATE_CLASS] to_process.append((entity_id, statistics_unit, state_class, valid_float_states)) - if "sum" in wanted_statistics[entity_id]: + if "sum" in wanted_statistics[entity_id].types: to_query.add(entity_id) last_stats = statistics.get_latest_short_term_statistics_with_session( @@ -483,6 +543,10 @@ def compile_statistics( # noqa: C901 state_class, valid_float_states, ) in to_process: + mean_type = StatisticMeanType.NONE + if "mean" in wanted_statistics[entity_id].types: + mean_type = wanted_statistics[entity_id].mean_type + # Check metadata if old_metadata := old_metadatas.get(entity_id): if not _equivalent_units( @@ -508,10 +572,34 @@ def compile_statistics( # noqa: C901 ) continue + if ( + mean_type is not StatisticMeanType.NONE + and (old_mean_type := old_metadata[1]["mean_type"]) + is not StatisticMeanType.NONE + and mean_type != old_mean_type + ): + if WARN_STATISTICS_MEAN_CHANGED not in hass.data: + hass.data[WARN_STATISTICS_MEAN_CHANGED] = set() + if entity_id not in hass.data[WARN_STATISTICS_MEAN_CHANGED]: + hass.data[WARN_STATISTICS_MEAN_CHANGED].add(entity_id) + _LOGGER.warning( + ( + "The statistics mean algorithm for %s have changed from %s to %s." 
+ " Generation of long term statistics will be suppressed" + " unless it changes back or go to %s to delete the old" + " statistics" + ), + entity_id, + old_mean_type.name, + mean_type.name, + LINK_DEV_STATISTICS, + ) + continue + # Set meta data meta: StatisticMetaData = { - "has_mean": "mean" in wanted_statistics[entity_id], - "has_sum": "sum" in wanted_statistics[entity_id], + "mean_type": mean_type, + "has_sum": "sum" in wanted_statistics[entity_id].types, "name": None, "source": RECORDER_DOMAIN, "statistic_id": entity_id, @@ -520,19 +608,26 @@ def compile_statistics( # noqa: C901 # Make calculations stat: StatisticData = {"start": start} - if "max" in wanted_statistics[entity_id]: + if "max" in wanted_statistics[entity_id].types: stat["max"] = max( *itertools.islice(zip(*valid_float_states, strict=False), 1) ) - if "min" in wanted_statistics[entity_id]: + if "min" in wanted_statistics[entity_id].types: stat["min"] = min( *itertools.islice(zip(*valid_float_states, strict=False), 1) ) - if "mean" in wanted_statistics[entity_id]: - stat["mean"] = _time_weighted_average(valid_float_states, start, end) + match mean_type: + case StatisticMeanType.ARITHMETIC: + stat["mean"] = _time_weighted_arithmetic_mean( + valid_float_states, start, end + ) + case StatisticMeanType.CIRCULAR: + stat["mean"] = _time_weighted_circular_mean( + valid_float_states, start, end + ) - if "sum" in wanted_statistics[entity_id]: + if "sum" in wanted_statistics[entity_id].types: last_reset = old_last_reset = None new_state = old_state = None _sum = 0.0 @@ -656,18 +751,25 @@ def list_statistic_ids( attributes = state.attributes state_class = attributes[ATTR_STATE_CLASS] provided_statistics = DEFAULT_STATISTICS[state_class] - if statistic_type is not None and statistic_type not in provided_statistics: + if ( + statistic_type is not None + and statistic_type not in provided_statistics.types + ): continue if ( - (has_sum := "sum" in provided_statistics) + (has_sum := "sum" in provided_statistics.types) and ATTR_LAST_RESET not in attributes and state_class == SensorStateClass.MEASUREMENT ): continue + mean_type = StatisticMeanType.NONE + if "mean" in provided_statistics.types: + mean_type = provided_statistics.mean_type + result[entity_id] = { - "has_mean": "mean" in provided_statistics, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": RECORDER_DOMAIN, @@ -697,7 +799,7 @@ def _update_issues( if numeric and state_class is None: # Sensor no longer has a valid state class report_issue( - "state_class_removed", + STATE_CLASS_REMOVED_ISSUE, entity_id, {"statistic_id": entity_id}, ) @@ -708,7 +810,7 @@ def _update_issues( if numeric and not _equivalent_units({state_unit, metadata_unit}): # The unit has changed, and it's not possible to convert report_issue( - "units_changed", + UNITS_CHANGED_ISSUE, entity_id, { "statistic_id": entity_id, @@ -722,7 +824,7 @@ def _update_issues( valid_units = (unit or "" for unit in converter.VALID_UNITS) valid_units_str = ", ".join(sorted(valid_units)) report_issue( - "units_changed", + UNITS_CHANGED_ISSUE, entity_id, { "statistic_id": entity_id, @@ -732,6 +834,23 @@ def _update_issues( }, ) + if ( + (metadata_mean_type := metadata[1]["mean_type"]) is not None + and state_class + and (state_mean_type := DEFAULT_STATISTICS[state_class].mean_type) + != metadata_mean_type + ): + # The mean type has changed and the old statistics are not valid anymore + report_issue( + MEAN_TYPE_CHANGED_ISSUE, + entity_id, + { + "statistic_id": entity_id, + "metadata_mean_type": 
metadata_mean_type, + "state_mean_type": state_mean_type, + }, + ) + def update_statistics_issues( hass: HomeAssistant, @@ -754,7 +873,11 @@ def update_statistics_issues( issue.domain != DOMAIN or not (issue_data := issue.data) or issue_data.get("issue_type") - not in ("state_class_removed", "units_changed") + not in ( + STATE_CLASS_REMOVED_ISSUE, + UNITS_CHANGED_ISSUE, + MEAN_TYPE_CHANGED_ISSUE, + ) ): continue issues.add(issue.issue_id) diff --git a/homeassistant/components/sensor/strings.json b/homeassistant/components/sensor/strings.json index ae414a178e9..123c30da72e 100644 --- a/homeassistant/components/sensor/strings.json +++ b/homeassistant/components/sensor/strings.json @@ -278,10 +278,10 @@ "name": "Timestamp" }, "volatile_organic_compounds": { - "name": "VOCs" + "name": "Volatile organic compounds" }, "volatile_organic_compounds_parts": { - "name": "[%key:component::sensor::entity_component::volatile_organic_compounds::name%]" + "name": "Volatile organic compounds parts" }, "voltage": { "name": "Voltage" @@ -309,6 +309,10 @@ } }, "issues": { + "mean_type_changed": { + "title": "The mean type of {statistic_id} has changed", + "description": "" + }, "state_class_removed": { "title": "{statistic_id} no longer has a state class", "description": "" diff --git a/homeassistant/components/serial/manifest.json b/homeassistant/components/serial/manifest.json index cfe9196f596..2a5d3c78737 100644 --- a/homeassistant/components/serial/manifest.json +++ b/homeassistant/components/serial/manifest.json @@ -4,5 +4,5 @@ "codeowners": ["@fabaff"], "documentation": "https://www.home-assistant.io/integrations/serial", "iot_class": "local_polling", - "requirements": ["pyserial-asyncio-fast==0.14"] + "requirements": ["pyserial-asyncio-fast==0.16"] } diff --git a/homeassistant/components/shelly/__init__.py b/homeassistant/components/shelly/__init__.py index a7ee1c029df..ee28c41f18b 100644 --- a/homeassistant/components/shelly/__init__.py +++ b/homeassistant/components/shelly/__init__.py @@ -111,6 +111,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ShellyConfigEntry) -> bool: """Set up Shelly from a config entry.""" + entry.runtime_data = ShellyEntryData([]) + # The custom component for Shelly devices uses shelly domain as well as core # integration. If the user removes the custom component but doesn't remove the # config entry, core integration will try to configure that config entry with an @@ -162,7 +164,8 @@ async def _async_setup_block_entry( device_entry = None sleep_period = entry.data.get(CONF_SLEEP_PERIOD) - runtime_data = entry.runtime_data = ShellyEntryData(BLOCK_SLEEPING_PLATFORMS) + runtime_data = entry.runtime_data + runtime_data.platforms = BLOCK_SLEEPING_PLATFORMS # Some old firmware have a wrong sleep period hardcoded value. 
# Following code block will force the right value for affected devices @@ -189,13 +192,25 @@ async def _async_setup_block_entry( if not device.firmware_supported: async_create_issue_unsupported_firmware(hass, entry) await device.shutdown() - raise ConfigEntryNotReady + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="firmware_unsupported", + translation_placeholders={"device": entry.title}, + ) except (DeviceConnectionError, MacAddressMismatchError) as err: await device.shutdown() - raise ConfigEntryNotReady(repr(err)) from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="device_communication_error", + translation_placeholders={"device": entry.title}, + ) from err except InvalidAuthError as err: await device.shutdown() - raise ConfigEntryAuthFailed(repr(err)) from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_error", + translation_placeholders={"device": entry.title}, + ) from err runtime_data.block = ShellyBlockCoordinator(hass, entry, device) runtime_data.block.async_setup() @@ -261,7 +276,8 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry) device_entry = None sleep_period = entry.data.get(CONF_SLEEP_PERIOD) - runtime_data = entry.runtime_data = ShellyEntryData(RPC_SLEEPING_PLATFORMS) + runtime_data = entry.runtime_data + runtime_data.platforms = RPC_SLEEPING_PLATFORMS if sleep_period == 0: # Not a sleeping device, finish setup @@ -272,16 +288,30 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry) if not device.firmware_supported: async_create_issue_unsupported_firmware(hass, entry) await device.shutdown() - raise ConfigEntryNotReady - runtime_data.rpc_script_events = await get_rpc_scripts_event_types( - device, ignore_scripts=[BLE_SCRIPT_NAME] - ) + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="firmware_unsupported", + translation_placeholders={"device": entry.title}, + ) + runtime_data.rpc_supports_scripts = await device.supports_scripts() + if runtime_data.rpc_supports_scripts: + runtime_data.rpc_script_events = await get_rpc_scripts_event_types( + device, ignore_scripts=[BLE_SCRIPT_NAME] + ) except (DeviceConnectionError, MacAddressMismatchError, RpcCallError) as err: await device.shutdown() - raise ConfigEntryNotReady(repr(err)) from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="device_communication_error", + translation_placeholders={"device": entry.title}, + ) from err except InvalidAuthError as err: await device.shutdown() - raise ConfigEntryAuthFailed(repr(err)) from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_error", + translation_placeholders={"device": entry.title}, + ) from err runtime_data.rpc = ShellyRpcCoordinator(hass, entry, device) runtime_data.rpc.async_setup() diff --git a/homeassistant/components/shelly/button.py b/homeassistant/components/shelly/button.py index 15bde4fbdff..06dffba5ead 100644 --- a/homeassistant/components/shelly/button.py +++ b/homeassistant/components/shelly/button.py @@ -193,8 +193,7 @@ class ShellyBaseButton( translation_key="device_communication_action_error", translation_placeholders={ "entity": self.entity_id, - "device": self.coordinator.device.name, - "error": repr(err), + "device": self.coordinator.name, }, ) from err except RpcCallError as err: @@ -203,8 +202,7 @@ class ShellyBaseButton( translation_key="rpc_call_action_error", translation_placeholders={ "entity": 
self.entity_id, - "device": self.coordinator.device.name, - "error": repr(err), + "device": self.coordinator.name, }, ) from err except InvalidAuthError: diff --git a/homeassistant/components/shelly/climate.py b/homeassistant/components/shelly/climate.py index c3612ed3f4f..498f2d3dba9 100644 --- a/homeassistant/components/shelly/climate.py +++ b/homeassistant/components/shelly/climate.py @@ -326,8 +326,12 @@ class BlockSleepingClimate( except DeviceConnectionError as err: self.coordinator.last_update_success = False raise HomeAssistantError( - f"Setting state for entity {self.name} failed, state: {kwargs}, error:" - f" {err!r}" + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() diff --git a/homeassistant/components/shelly/config_flow.py b/homeassistant/components/shelly/config_flow.py index c7c1cd70a53..200a88ea24c 100644 --- a/homeassistant/components/shelly/config_flow.py +++ b/homeassistant/components/shelly/config_flow.py @@ -7,12 +7,7 @@ from typing import Any, Final from aioshelly.block_device import BlockDevice from aioshelly.common import ConnectionOptions, get_info -from aioshelly.const import ( - BLOCK_GENERATIONS, - DEFAULT_HTTP_PORT, - MODEL_WALL_DISPLAY, - RPC_GENERATIONS, -) +from aioshelly.const import BLOCK_GENERATIONS, DEFAULT_HTTP_PORT, RPC_GENERATIONS from aioshelly.exceptions import ( CustomPortNotSupported, DeviceConnectionError, @@ -461,11 +456,9 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_supports_options_flow(cls, config_entry: ShellyConfigEntry) -> bool: """Return options flow support for this handler.""" - return ( - get_device_entry_gen(config_entry) in RPC_GENERATIONS - and not config_entry.data.get(CONF_SLEEP_PERIOD) - and config_entry.data.get(CONF_MODEL) != MODEL_WALL_DISPLAY - ) + return get_device_entry_gen( + config_entry + ) in RPC_GENERATIONS and not config_entry.data.get(CONF_SLEEP_PERIOD) class OptionsFlowHandler(OptionsFlow): @@ -475,6 +468,13 @@ class OptionsFlowHandler(OptionsFlow): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle options flow.""" + if ( + supports_scripts := self.config_entry.runtime_data.rpc_supports_scripts + ) is None: + return self.async_abort(reason="cannot_connect") + if not supports_scripts: + return self.async_abort(reason="no_scripts_support") + if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/shelly/const.py b/homeassistant/components/shelly/const.py index c94c827b7db..43fb6df18d0 100644 --- a/homeassistant/components/shelly/const.py +++ b/homeassistant/components/shelly/const.py @@ -25,6 +25,7 @@ from aioshelly.const import ( MODEL_VALVE, MODEL_VINTAGE_V2, MODEL_WALL_DISPLAY, + MODEL_WALL_DISPLAY_X2, ) from homeassistant.components.number import NumberMode @@ -245,6 +246,7 @@ GEN2_RELEASE_URL = "https://shelly-api-docs.shelly.cloud/gen2/changelog/" GEN2_BETA_RELEASE_URL = f"{GEN2_RELEASE_URL}#unreleased" DEVICES_WITHOUT_FIRMWARE_CHANGELOG = ( MODEL_WALL_DISPLAY, + MODEL_WALL_DISPLAY_X2, MODEL_MOTION, MODEL_MOTION_2, MODEL_VALVE, diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index 85cf430bc5d..4a1ea72f38a 100644 --- a/homeassistant/components/shelly/coordinator.py +++ 
b/homeassistant/components/shelly/coordinator.py @@ -89,6 +89,7 @@ class ShellyEntryData: rpc: ShellyRpcCoordinator | None = None rpc_poll: ShellyRpcPollingCoordinator | None = None rpc_script_events: dict[int, list[str]] | None = None + rpc_supports_scripts: bool | None = None type ShellyConfigEntry = ConfigEntry[ShellyEntryData] @@ -378,14 +379,23 @@ class ShellyBlockCoordinator(ShellyCoordinatorBase[BlockDevice]): if self.sleep_period: # Sleeping device, no point polling it, just mark it unavailable raise UpdateFailed( - f"Sleeping device did not update within {self.sleep_period} seconds interval" + translation_domain=DOMAIN, + translation_key="update_error_sleeping_device", + translation_placeholders={ + "device": self.name, + "period": str(self.sleep_period), + }, ) LOGGER.debug("Polling Shelly Block Device - %s", self.name) try: await self.device.update() except DeviceConnectionError as err: - raise UpdateFailed(repr(err)) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={"device": self.name}, + ) from err except InvalidAuthError: await self.async_shutdown_device_and_start_reauth() @@ -470,7 +480,11 @@ class ShellyRestCoordinator(ShellyCoordinatorBase[BlockDevice]): return await self.device.update_shelly() except (DeviceConnectionError, MacAddressMismatchError) as err: - raise UpdateFailed(repr(err)) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={"device": self.name}, + ) from err except InvalidAuthError: await self.async_shutdown_device_and_start_reauth() else: @@ -636,7 +650,12 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): if self.sleep_period: # Sleeping device, no point polling it, just mark it unavailable raise UpdateFailed( - f"Sleeping device did not update within {self.sleep_period} seconds interval" + translation_domain=DOMAIN, + translation_key="update_error_sleeping_device", + translation_placeholders={ + "device": self.name, + "period": str(self.sleep_period), + }, ) async with self._connection_lock: @@ -644,7 +663,11 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): return if not await self._async_device_connect_task(): - raise UpdateFailed("Device reconnect error") + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error_reconnect_error", + translation_placeholders={"device": self.name}, + ) async def _async_disconnected(self, reconnect: bool) -> None: """Handle device disconnected.""" @@ -694,7 +717,8 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): is updated. 
""" if not self.sleep_period: - await self._async_connect_ble_scanner() + if self.config_entry.runtime_data.rpc_supports_scripts: + await self._async_connect_ble_scanner() else: await self._async_setup_outbound_websocket() @@ -820,13 +844,21 @@ class ShellyRpcPollingCoordinator(ShellyCoordinatorBase[RpcDevice]): async def _async_update_data(self) -> None: """Fetch data.""" if not self.device.connected: - raise UpdateFailed("Device disconnected") + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error_device_disconnected", + translation_placeholders={"device": self.name}, + ) LOGGER.debug("Polling Shelly RPC Device - %s", self.name) try: await self.device.poll() except (DeviceConnectionError, RpcCallError) as err: - raise UpdateFailed(f"Device disconnected: {err!r}") from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={"device": self.name}, + ) from err except InvalidAuthError: await self.async_shutdown_device_and_start_reauth() diff --git a/homeassistant/components/shelly/device_trigger.py b/homeassistant/components/shelly/device_trigger.py index 6e96eb5ed21..740e6aae9b2 100644 --- a/homeassistant/components/shelly/device_trigger.py +++ b/homeassistant/components/shelly/device_trigger.py @@ -105,7 +105,9 @@ async def async_validate_trigger_config( return config raise InvalidDeviceAutomationConfig( - f"Invalid ({CONF_TYPE},{CONF_SUBTYPE}): {trigger}" + translation_domain=DOMAIN, + translation_key="invalid_trigger", + translation_placeholders={"trigger": str(trigger)}, ) @@ -137,7 +139,11 @@ async def async_get_triggers( return triggers - raise InvalidDeviceAutomationConfig(f"Device not found: {device_id}") + raise InvalidDeviceAutomationConfig( + translation_domain=DOMAIN, + translation_key="device_not_found", + translation_placeholders={"device": device_id}, + ) async def async_attach_trigger( diff --git a/homeassistant/components/shelly/entity.py b/homeassistant/components/shelly/entity.py index 58ac34fc5ca..9ed3f47b41a 100644 --- a/homeassistant/components/shelly/entity.py +++ b/homeassistant/components/shelly/entity.py @@ -19,7 +19,7 @@ from homeassistant.helpers.entity_registry import RegistryEntry from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import CONF_SLEEP_PERIOD, LOGGER +from .const import CONF_SLEEP_PERIOD, DOMAIN, LOGGER from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .utils import ( async_remove_shelly_entity, @@ -345,8 +345,12 @@ class ShellyBlockEntity(CoordinatorEntity[ShellyBlockCoordinator]): except DeviceConnectionError as err: self.coordinator.last_update_success = False raise HomeAssistantError( - f"Setting state for entity {self.name} failed, state: {kwargs}, error:" - f" {err!r}" + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() @@ -406,13 +410,21 @@ class ShellyRpcEntity(CoordinatorEntity[ShellyRpcCoordinator]): except DeviceConnectionError as err: self.coordinator.last_update_success = False raise HomeAssistantError( - f"Call RPC for {self.name} connection error, method: {method}, params:" - f" {params}, error: {err!r}" + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + 
translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, ) from err except RpcCallError as err: raise HomeAssistantError( - f"Call RPC for {self.name} request error, method: {method}, params:" - f" {params}, error: {err!r}" + translation_domain=DOMAIN, + translation_key="rpc_call_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() diff --git a/homeassistant/components/shelly/number.py b/homeassistant/components/shelly/number.py index a8e6de1ca73..c629eb4a57a 100644 --- a/homeassistant/components/shelly/number.py +++ b/homeassistant/components/shelly/number.py @@ -25,7 +25,7 @@ from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceIn from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.entity_registry import RegistryEntry -from .const import CONF_SLEEP_PERIOD, LOGGER, VIRTUAL_NUMBER_MODE_MAP +from .const import CONF_SLEEP_PERIOD, DOMAIN, LOGGER, VIRTUAL_NUMBER_MODE_MAP from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .entity import ( BlockEntityDescription, @@ -324,8 +324,12 @@ class BlockSleepingNumber(ShellySleepingBlockAttributeEntity, RestoreNumber): except DeviceConnectionError as err: self.coordinator.last_update_success = False raise HomeAssistantError( - f"Setting state for entity {self.name} failed, state: {params}, error:" - f" {err!r}" + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() diff --git a/homeassistant/components/shelly/quality_scale.yaml b/homeassistant/components/shelly/quality_scale.yaml new file mode 100644 index 00000000000..ac2a0756b5b --- /dev/null +++ b/homeassistant/components/shelly/quality_scale.yaml @@ -0,0 +1,72 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: The integration does not register services. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: + status: todo + comment: make sure flows end with created entry or abort + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: The integration does not register services. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: todo + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: The integration does not register services. 
+ config-entry-unloading: done + docs-configuration-parameters: done + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: done + dynamic-devices: + status: exempt + comment: The integration connects to a single device. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: done + repair-issues: done + stale-devices: + status: todo + comment: BLU TRV needs to be removed when un-paired + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/shelly/strings.json b/homeassistant/components/shelly/strings.json index 22d88928387..3465891dc68 100644 --- a/homeassistant/components/shelly/strings.json +++ b/homeassistant/components/shelly/strings.json @@ -17,12 +17,20 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "Username for the device's web panel.", + "password": "Password for the device's web panel." } }, "reauth_confirm": { "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "[%key:component::shelly::config::step::credentials::data_description::username%]", + "password": "[%key:component::shelly::config::step::credentials::data_description::password%]" } }, "confirm_discovery": { @@ -87,8 +95,15 @@ "description": "Bluetooth scanning can be active or passive. With active, the Shelly requests data from nearby devices; with passive, the Shelly receives unsolicited data from nearby devices.", "data": { "ble_scanner_mode": "Bluetooth scanner mode" + }, + "data_description": { + "ble_scanner_mode": "The scanner mode to use for Bluetooth scanning." } } + }, + "abort": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "no_scripts_support": "Device does not support scripts and cannot be used as a Bluetooth scanner." 
} }, "selector": { @@ -195,20 +210,53 @@ "state": { "checking": "Checking", "closed": "[%key:common::state::closed%]", - "closing": "Closing", + "closing": "[%key:common::state::closing%]", "failure": "Failure", "opened": "Opened", - "opening": "Opening" + "opening": "[%key:common::state::opening%]" } } } }, "exceptions": { + "auth_error": { + "message": "Authentication failed for {device}, please update your credentials" + }, + "device_communication_error": { + "message": "Device communication error occurred for {device}" + }, "device_communication_action_error": { - "message": "Device communication error occurred while calling the entity {entity} action for {device} device: {error}" + "message": "Device communication error occurred while calling action for {entity} of {device}" + }, + "device_not_found": { + "message": "{device} not found while configuring device automation triggers" + }, + "firmware_unsupported": { + "message": "{device} is running an unsupported firmware, please update the firmware" + }, + "invalid_trigger": { + "message": "Invalid device automation trigger (type, subtype): {trigger}" + }, + "ota_update_connection_error": { + "message": "Device communication error occurred while triggering OTA update for {device}" + }, + "ota_update_rpc_error": { + "message": "RPC call error occurred while triggering OTA update for {device}" }, "rpc_call_action_error": { - "message": "RPC call error occurred while calling the entity {entity} action for {device} device: {error}" + "message": "RPC call error occurred while calling action for {entity} of {device}" + }, + "update_error": { + "message": "An error occurred while retrieving data from {device}" + }, + "update_error_device_disconnected": { + "message": "An error occurred while retrieving data from {device} because it is disconnected" + }, + "update_error_reconnect_error": { + "message": "An error occurred while reconnecting to {device}" + }, + "update_error_sleeping_device": { + "message": "Sleeping device did not update within {period} seconds interval" } }, "issues": { diff --git a/homeassistant/components/shelly/update.py b/homeassistant/components/shelly/update.py index b1aa84b2640..12ce6dc70cd 100644 --- a/homeassistant/components/shelly/update.py +++ b/homeassistant/components/shelly/update.py @@ -25,7 +25,14 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity -from .const import CONF_SLEEP_PERIOD, OTA_BEGIN, OTA_ERROR, OTA_PROGRESS, OTA_SUCCESS +from .const import ( + CONF_SLEEP_PERIOD, + DOMAIN, + OTA_BEGIN, + OTA_ERROR, + OTA_PROGRESS, + OTA_SUCCESS, +) from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .entity import ( RestEntityDescription, @@ -198,7 +205,11 @@ class RestUpdateEntity(ShellyRestAttributeEntity, UpdateEntity): try: result = await self.coordinator.device.trigger_ota_update(beta=beta) except DeviceConnectionError as err: - raise HomeAssistantError(f"Error starting OTA update: {err!r}") from err + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="ota_update_connection_error", + translation_placeholders={"device": self.coordinator.name}, + ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() else: @@ -310,9 +321,20 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): try: await self.coordinator.device.trigger_ota_update(beta=beta) 
except DeviceConnectionError as err: - raise HomeAssistantError(f"OTA update connection error: {err!r}") from err + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="ota_update_connection_error", + translation_placeholders={"device": self.coordinator.name}, + ) from err except RpcCallError as err: - raise HomeAssistantError(f"OTA update request error: {err!r}") from err + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="ota_update_rpc_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, + ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() else: diff --git a/homeassistant/components/siemens/__init__.py b/homeassistant/components/siemens/__init__.py new file mode 100644 index 00000000000..314b7c63da9 --- /dev/null +++ b/homeassistant/components/siemens/__init__.py @@ -0,0 +1 @@ +"""Siemens virtual integration.""" diff --git a/homeassistant/components/siemens/manifest.json b/homeassistant/components/siemens/manifest.json new file mode 100644 index 00000000000..e53aca0895f --- /dev/null +++ b/homeassistant/components/siemens/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "siemens", + "name": "Siemens", + "integration_type": "virtual", + "supported_by": "home_connect" +} diff --git a/homeassistant/components/skybell/config_flow.py b/homeassistant/components/skybell/config_flow.py index a32441f4cf8..9893d0dd93a 100644 --- a/homeassistant/components/skybell/config_flow.py +++ b/homeassistant/components/skybell/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from aioskybell import Skybell, exceptions @@ -14,6 +15,8 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + class SkybellFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Skybell.""" @@ -95,6 +98,7 @@ class SkybellFlowHandler(ConfigFlow, domain=DOMAIN): return None, "invalid_auth" except exceptions.SkybellException: return None, "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") return None, "unknown" return skybell.user_id, None diff --git a/homeassistant/components/sleepiq/strings.json b/homeassistant/components/sleepiq/strings.json index bdafbfb6c77..60f6026304b 100644 --- a/homeassistant/components/sleepiq/strings.json +++ b/homeassistant/components/sleepiq/strings.json @@ -28,7 +28,7 @@ "select": { "foot_warmer_temp": { "state": { - "off": "Off", + "off": "[%key:common::state::off%]", "low": "Low", "medium": "Medium", "high": "High" diff --git a/homeassistant/components/smappee/strings.json b/homeassistant/components/smappee/strings.json index 2966b5cd753..3037fbc98f6 100644 --- a/homeassistant/components/smappee/strings.json +++ b/homeassistant/components/smappee/strings.json @@ -15,7 +15,7 @@ } }, "zeroconf_confirm": { - "description": "Do you want to add the Smappee device with serialnumber `{serialnumber}` to Home Assistant?", + "description": "Do you want to add the Smappee device with serial number `{serialnumber}` to Home Assistant?", "title": "Discovered Smappee device" }, "pick_implementation": { diff --git a/homeassistant/components/smartthings/__init__.py b/homeassistant/components/smartthings/__init__.py index e5351798219..346d5e66b42 100644 --- a/homeassistant/components/smartthings/__init__.py +++ 
b/homeassistant/components/smartthings/__init__.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Callable +import contextlib from dataclasses import dataclass from http import HTTPStatus import logging @@ -12,15 +13,17 @@ from aiohttp import ClientResponseError from pysmartthings import ( Attribute, Capability, + ComponentStatus, Device, DeviceEvent, + Lifecycle, Scene, SmartThings, SmartThingsAuthenticationFailedError, + SmartThingsConnectionError, SmartThingsSinkError, Status, ) -from pysmartthings.models import Lifecycle from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -37,14 +40,16 @@ from homeassistant.const import ( ) from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.config_entry_oauth2_flow import ( OAuth2Session, async_get_config_entry_implementation, ) +from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries from .const import ( + BINARY_SENSOR_ATTRIBUTES_TO_CAPABILITIES, CONF_INSTALLED_APP_ID, CONF_LOCATION_ID, CONF_SUBSCRIPTION_ID, @@ -52,6 +57,7 @@ from .const import ( EVENT_BUTTON, MAIN, OLD_DATA, + SENSOR_ATTRIBUTES_TO_CAPABILITIES, ) _LOGGER = logging.getLogger(__name__) @@ -72,7 +78,7 @@ class FullDevice: """Define an object to hold device data.""" device: Device - status: dict[str, dict[Capability | str, dict[Attribute | str, Status]]] + status: dict[str, ComponentStatus] type SmartThingsConfigEntry = ConfigEntry[SmartThingsData] @@ -86,6 +92,7 @@ PLATFORMS = [ Platform.FAN, Platform.LIGHT, Platform.LOCK, + Platform.MEDIA_PLAYER, Platform.NUMBER, Platform.SCENE, Platform.SELECT, @@ -124,7 +131,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) client.refresh_token_function = _refresh_token def _handle_max_connections() -> None: - _LOGGER.debug("We hit the limit of max connections") + _LOGGER.debug( + "We hit the limit of max connections or we could not remove the old one, so retrying" + ) hass.config_entries.async_schedule_reload(entry.entry_id) client.max_connections_reached_callback = _handle_max_connections @@ -147,7 +156,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) if (old_identifier := entry.data.get(CONF_SUBSCRIPTION_ID)) is not None: _LOGGER.debug("Trying to delete old subscription %s", old_identifier) - await client.delete_subscription(old_identifier) + try: + await client.delete_subscription(old_identifier) + except SmartThingsConnectionError as err: + raise ConfigEntryNotReady("Could not delete old subscription") from err _LOGGER.debug("Trying to create a new subscription") try: @@ -274,7 +286,8 @@ async def async_unload_entry( """Unload a config entry.""" client = entry.runtime_data.client if (subscription_id := entry.data.get(CONF_SUBSCRIPTION_ID)) is not None: - await client.delete_subscription(subscription_id) + with contextlib.suppress(SmartThingsConnectionError): + await client.delete_subscription(subscription_id) return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @@ -287,9 +300,112 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry, version=3, data={OLD_DATA: dict(entry.data)} ) + if entry.minor_version < 
2: + + def migrate_entities(entity_entry: RegistryEntry) -> dict[str, Any] | None: + if entity_entry.domain == "binary_sensor": + device_id, attribute = entity_entry.unique_id.split(".") + if ( + capability := BINARY_SENSOR_ATTRIBUTES_TO_CAPABILITIES.get( + attribute + ) + ) is None: + return None + new_unique_id = ( + f"{device_id}_{MAIN}_{capability}_{attribute}_{attribute}" + ) + return { + "new_unique_id": new_unique_id, + } + if entity_entry.domain in {"cover", "climate", "fan", "light", "lock"}: + return {"new_unique_id": f"{entity_entry.unique_id}_{MAIN}"} + if entity_entry.domain == "sensor": + delimiter = "." if " " not in entity_entry.unique_id else " " + if delimiter not in entity_entry.unique_id: + return None + device_id, attribute = entity_entry.unique_id.split( + delimiter, maxsplit=1 + ) + if ( + capability := SENSOR_ATTRIBUTES_TO_CAPABILITIES.get(attribute) + ) is None: + if attribute in { + "energy_meter", + "power_meter", + "deltaEnergy_meter", + "powerEnergy_meter", + "energySaved_meter", + }: + return { + "new_unique_id": f"{device_id}_{MAIN}_{Capability.POWER_CONSUMPTION_REPORT}_{Attribute.POWER_CONSUMPTION}_{attribute}", + } + if attribute in { + "X Coordinate", + "Y Coordinate", + "Z Coordinate", + }: + new_attribute = { + "X Coordinate": "x_coordinate", + "Y Coordinate": "y_coordinate", + "Z Coordinate": "z_coordinate", + }[attribute] + return { + "new_unique_id": f"{device_id}_{MAIN}_{Capability.THREE_AXIS}_{Attribute.THREE_AXIS}_{new_attribute}", + } + if attribute in { + Attribute.MACHINE_STATE, + Attribute.COMPLETION_TIME, + }: + capability = determine_machine_type( + hass, entry.entry_id, device_id + ) + if capability is None: + return None + return { + "new_unique_id": f"{device_id}_{MAIN}_{capability}_{attribute}_{attribute}", + } + return None + return { + "new_unique_id": f"{device_id}_{MAIN}_{capability}_{attribute}_{attribute}", + } + + if entity_entry.domain == "switch": + return { + "new_unique_id": f"{entity_entry.unique_id}_{MAIN}_{Capability.SWITCH}_{Attribute.SWITCH}_{Attribute.SWITCH}", + } + + return None + + await async_migrate_entries(hass, entry.entry_id, migrate_entities) + hass.config_entries.async_update_entry( + entry, + minor_version=2, + ) + return True +def determine_machine_type( + hass: HomeAssistant, + entry_id: str, + device_id: str, +) -> Capability | None: + """Determine the machine type for a device.""" + entity_registry = er.async_get(hass) + entries = er.async_entries_for_config_entry(entity_registry, entry_id) + device_entries = [entry for entry in entries if device_id in entry.unique_id] + for entry in device_entries: + if Attribute.DISHWASHER_JOB_STATE in entry.unique_id: + return Capability.DISHWASHER_OPERATING_STATE + if Attribute.WASHER_JOB_STATE in entry.unique_id: + return Capability.WASHER_OPERATING_STATE + if Attribute.DRYER_JOB_STATE in entry.unique_id: + return Capability.DRYER_OPERATING_STATE + if Attribute.OVEN_JOB_STATE in entry.unique_id: + return Capability.OVEN_OPERATING_STATE + return None + + def create_devices( device_registry: dr.DeviceRegistry, devices: dict[str, FullDevice], @@ -297,7 +413,9 @@ def create_devices( rooms: dict[str, str], ) -> None: """Create devices in the device registry.""" - for device in devices.values(): + for device in sorted( + devices.values(), key=lambda d: d.device.parent_device_id or "" + ): kwargs: dict[str, Any] = {} if device.device.hub is not None: kwargs = { @@ -308,7 +426,7 @@ def create_devices( kwargs[ATTR_CONNECTIONS] = { (dr.CONNECTION_NETWORK_MAC, 
device.device.hub.mac_address) } - if device.device.parent_device_id: + if device.device.parent_device_id and device.device.parent_device_id in devices: kwargs[ATTR_VIA_DEVICE] = (DOMAIN, device.device.parent_device_id) if (ocf := device.device.ocf) is not None: kwargs.update( @@ -355,9 +473,7 @@ KEEP_CAPABILITY_QUIRK: dict[ } -def process_status( - status: dict[str, dict[Capability | str, dict[Attribute | str, Status]]], -) -> dict[str, dict[Capability | str, dict[Attribute | str, Status]]]: +def process_status(status: dict[str, ComponentStatus]) -> dict[str, ComponentStatus]: """Remove disabled capabilities from status.""" if (main_component := status.get(MAIN)) is None: return status diff --git a/homeassistant/components/smartthings/binary_sensor.py b/homeassistant/components/smartthings/binary_sensor.py index f776aa70c41..bd09f1725d3 100644 --- a/homeassistant/components/smartthings/binary_sensor.py +++ b/homeassistant/components/smartthings/binary_sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from pysmartthings import Attribute, Capability, Category, SmartThings +from pysmartthings import Attribute, Capability, Category, SmartThings, Status from homeassistant.components.automation import automations_with_entity from homeassistant.components.binary_sensor import ( @@ -38,6 +38,9 @@ class SmartThingsBinarySensorEntityDescription(BinarySensorEntityDescription): category: set[Category] | None = None exists_fn: Callable[[str], bool] | None = None component_translation_key: dict[str, str] | None = None + deprecated_fn: Callable[ + [dict[str, dict[Capability | str, dict[Attribute | str, Status]]]], str | None + ] = lambda _: None CAPABILITY_TO_SENSORS: dict[ @@ -66,6 +69,19 @@ CAPABILITY_TO_SENSORS: dict[ "freezer": "freezer_door", "cooler": "cooler_door", }, + deprecated_fn=( + lambda status: "fridge_door" + if "freezer" in status and "cooler" in status + else None + ), + ) + }, + Capability.CUSTOM_DRYER_WRINKLE_PREVENT: { + Attribute.OPERATING_STATE: SmartThingsBinarySensorEntityDescription( + key=Attribute.OPERATING_STATE, + translation_key="dryer_wrinkle_prevent_active", + is_on_key="running", + entity_category=EntityCategory.DIAGNOSTIC, ) }, Capability.FILTER_STATUS: { @@ -116,7 +132,14 @@ CAPABILITY_TO_SENSORS: dict[ key=Attribute.SWITCH, device_class=BinarySensorDeviceClass.POWER, is_on_key="on", - category={Category.DRYER, Category.WASHER}, + category={ + Category.CLOTHING_CARE_MACHINE, + Category.COOKTOP, + Category.DISHWASHER, + Category.DRYER, + Category.MICROWAVE, + Category.WASHER, + }, ) }, Capability.TAMPER_ALERT: { @@ -133,6 +156,7 @@ CAPABILITY_TO_SENSORS: dict[ translation_key="valve", device_class=BinarySensorDeviceClass.OPENING, is_on_key="open", + deprecated_fn=lambda _: "valve", ) }, Capability.WATER_SENSOR: { @@ -157,9 +181,7 @@ def get_main_component_category( device: FullDevice, ) -> Category | str: """Get the main component of a device.""" - main = next( - component for component in device.device.components if component.id == MAIN - ) + main = device.device.components[MAIN] return main.user_category or main.manufacturer_category @@ -209,7 +231,7 @@ class SmartThingsBinarySensor(SmartThingsEntity, BinarySensorEntity): self._attribute = attribute self.capability = capability self.entity_description = entity_description - self._attr_unique_id = f"{device.device.device_id}.{attribute}" + self._attr_unique_id = 
f"{device.device.device_id}_{component}_{capability}_{attribute}_{attribute}" if ( entity_description.category_device_class and (category := get_main_component_category(device)) @@ -227,9 +249,6 @@ class SmartThingsBinarySensor(SmartThingsEntity, BinarySensorEntity): is not None ): self._attr_translation_key = translation_key - self._attr_unique_id = ( - f"{device.device.device_id}_{component}_{capability}_{attribute}" - ) @property def is_on(self) -> bool: @@ -242,7 +261,7 @@ class SmartThingsBinarySensor(SmartThingsEntity, BinarySensorEntity): async def async_added_to_hass(self) -> None: """Call when entity is added to hass.""" await super().async_added_to_hass() - if self.capability is not Capability.VALVE: + if (issue := self.entity_description.deprecated_fn(self.device.status)) is None: return automations = automations_with_entity(self.hass, self.entity_id) scripts = scripts_with_entity(self.hass, self.entity_id) @@ -273,11 +292,11 @@ class SmartThingsBinarySensor(SmartThingsEntity, BinarySensorEntity): async_create_issue( self.hass, DOMAIN, - f"deprecated_binary_valve_{self.entity_id}", + f"deprecated_binary_{issue}_{self.entity_id}", breaks_in_ha_version="2025.10.0", is_fixable=False, severity=IssueSeverity.WARNING, - translation_key="deprecated_binary_valve", + translation_key=f"deprecated_binary_{issue}", translation_placeholders={ "entity": self.entity_id, "items": "\n".join(items_list), @@ -287,6 +306,8 @@ class SmartThingsBinarySensor(SmartThingsEntity, BinarySensorEntity): async def async_will_remove_from_hass(self) -> None: """Call when entity will be removed from hass.""" await super().async_will_remove_from_hass() + if (issue := self.entity_description.deprecated_fn(self.device.status)) is None: + return async_delete_issue( - self.hass, DOMAIN, f"deprecated_binary_valve_{self.entity_id}" + self.hass, DOMAIN, f"deprecated_binary_{issue}_{self.entity_id}" ) diff --git a/homeassistant/components/smartthings/button.py b/homeassistant/components/smartthings/button.py index ad61880f3b1..00fbaa0e2c4 100644 --- a/homeassistant/components/smartthings/button.py +++ b/homeassistant/components/smartthings/button.py @@ -29,6 +29,11 @@ CAPABILITIES_TO_BUTTONS: dict[Capability | str, SmartThingsButtonDescription] = translation_key="stop", command=Command.STOP, ), + Capability.CUSTOM_WATER_FILTER: SmartThingsButtonDescription( + key=Capability.CUSTOM_WATER_FILTER, + translation_key="reset_water_filter", + command=Command.RESET_WATER_FILTER, + ), } @@ -63,9 +68,7 @@ class SmartThingsButtonEntity(SmartThingsEntity, ButtonEntity): """Initialize the instance.""" super().__init__(client, device, set()) self.entity_description = entity_description - self._attr_unique_id = ( - f"{device.device.device_id}_{MAIN}_{entity_description.key}" - ) + self._attr_unique_id = f"{device.device.device_id}_{MAIN}_{entity_description.key}_{entity_description.command}" async def async_press(self) -> None: """Press the button.""" diff --git a/homeassistant/components/smartthings/config_flow.py b/homeassistant/components/smartthings/config_flow.py index d2654348527..03c8e4bfa66 100644 --- a/homeassistant/components/smartthings/config_flow.py +++ b/homeassistant/components/smartthings/config_flow.py @@ -20,6 +20,7 @@ class SmartThingsConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN): """Handle configuration of SmartThings integrations.""" VERSION = 3 + MINOR_VERSION = 2 DOMAIN = DOMAIN @property diff --git a/homeassistant/components/smartthings/const.py b/homeassistant/components/smartthings/const.py index 
2ba59ade4e8..a3ec9a38200 100644 --- a/homeassistant/components/smartthings/const.py +++ b/homeassistant/components/smartthings/const.py @@ -1,5 +1,7 @@ """Constants used by the SmartThings component and platforms.""" +from pysmartthings import Attribute, Capability + DOMAIN = "smartthings" SCOPES = [ @@ -35,3 +37,75 @@ OLD_DATA = "old_data" CONF_SUBSCRIPTION_ID = "subscription_id" EVENT_BUTTON = "smartthings.button" + +BINARY_SENSOR_ATTRIBUTES_TO_CAPABILITIES: dict[str, str] = { + Attribute.ACCELERATION: Capability.ACCELERATION_SENSOR, + Attribute.CONTACT: Capability.CONTACT_SENSOR, + Attribute.FILTER_STATUS: Capability.FILTER_STATUS, + Attribute.MOTION: Capability.MOTION_SENSOR, + Attribute.PRESENCE: Capability.PRESENCE_SENSOR, + Attribute.SOUND: Capability.SOUND_SENSOR, + Attribute.TAMPER: Capability.TAMPER_ALERT, + Attribute.VALVE: Capability.VALVE, + Attribute.WATER: Capability.WATER_SENSOR, +} + +SENSOR_ATTRIBUTES_TO_CAPABILITIES: dict[str, str] = { + Attribute.LIGHTING_MODE: Capability.ACTIVITY_LIGHTING_MODE, + Attribute.AIR_CONDITIONER_MODE: Capability.AIR_CONDITIONER_MODE, + Attribute.AIR_QUALITY: Capability.AIR_QUALITY_SENSOR, + Attribute.ALARM: Capability.ALARM, + Attribute.BATTERY: Capability.BATTERY, + Attribute.BMI_MEASUREMENT: Capability.BODY_MASS_INDEX_MEASUREMENT, + Attribute.BODY_WEIGHT_MEASUREMENT: Capability.BODY_WEIGHT_MEASUREMENT, + Attribute.CARBON_DIOXIDE: Capability.CARBON_DIOXIDE_MEASUREMENT, + Attribute.CARBON_MONOXIDE: Capability.CARBON_MONOXIDE_MEASUREMENT, + Attribute.CARBON_MONOXIDE_LEVEL: Capability.CARBON_MONOXIDE_MEASUREMENT, + Attribute.DISHWASHER_JOB_STATE: Capability.DISHWASHER_OPERATING_STATE, + Attribute.DRYER_MODE: Capability.DRYER_MODE, + Attribute.DRYER_JOB_STATE: Capability.DRYER_OPERATING_STATE, + Attribute.DUST_LEVEL: Capability.DUST_SENSOR, + Attribute.FINE_DUST_LEVEL: Capability.DUST_SENSOR, + Attribute.ENERGY: Capability.ENERGY_METER, + Attribute.EQUIVALENT_CARBON_DIOXIDE_MEASUREMENT: Capability.EQUIVALENT_CARBON_DIOXIDE_MEASUREMENT, + Attribute.FORMALDEHYDE_LEVEL: Capability.FORMALDEHYDE_MEASUREMENT, + Attribute.GAS_METER: Capability.GAS_METER, + Attribute.GAS_METER_CALORIFIC: Capability.GAS_METER, + Attribute.GAS_METER_TIME: Capability.GAS_METER, + Attribute.GAS_METER_VOLUME: Capability.GAS_METER, + Attribute.ILLUMINANCE: Capability.ILLUMINANCE_MEASUREMENT, + Attribute.INFRARED_LEVEL: Capability.INFRARED_LEVEL, + Attribute.INPUT_SOURCE: Capability.MEDIA_INPUT_SOURCE, + Attribute.PLAYBACK_REPEAT_MODE: Capability.MEDIA_PLAYBACK_REPEAT, + Attribute.PLAYBACK_SHUFFLE: Capability.MEDIA_PLAYBACK_SHUFFLE, + Attribute.PLAYBACK_STATUS: Capability.MEDIA_PLAYBACK, + Attribute.ODOR_LEVEL: Capability.ODOR_SENSOR, + Attribute.OVEN_MODE: Capability.OVEN_MODE, + Attribute.OVEN_JOB_STATE: Capability.OVEN_OPERATING_STATE, + Attribute.OVEN_SETPOINT: Capability.OVEN_SETPOINT, + Attribute.POWER: Capability.POWER_METER, + Attribute.POWER_SOURCE: Capability.POWER_SOURCE, + Attribute.REFRIGERATION_SETPOINT: Capability.REFRIGERATION_SETPOINT, + Attribute.HUMIDITY: Capability.RELATIVE_HUMIDITY_MEASUREMENT, + Attribute.ROBOT_CLEANER_CLEANING_MODE: Capability.ROBOT_CLEANER_CLEANING_MODE, + Attribute.ROBOT_CLEANER_MOVEMENT: Capability.ROBOT_CLEANER_MOVEMENT, + Attribute.ROBOT_CLEANER_TURBO_MODE: Capability.ROBOT_CLEANER_TURBO_MODE, + Attribute.LQI: Capability.SIGNAL_STRENGTH, + Attribute.RSSI: Capability.SIGNAL_STRENGTH, + Attribute.SMOKE: Capability.SMOKE_DETECTOR, + Attribute.TEMPERATURE: Capability.TEMPERATURE_MEASUREMENT, + Attribute.COOLING_SETPOINT: 
Capability.THERMOSTAT_COOLING_SETPOINT, + Attribute.THERMOSTAT_FAN_MODE: Capability.THERMOSTAT_FAN_MODE, + Attribute.HEATING_SETPOINT: Capability.THERMOSTAT_HEATING_SETPOINT, + Attribute.THERMOSTAT_MODE: Capability.THERMOSTAT_MODE, + Attribute.THERMOSTAT_OPERATING_STATE: Capability.THERMOSTAT_OPERATING_STATE, + Attribute.THERMOSTAT_SETPOINT: Capability.THERMOSTAT_SETPOINT, + Attribute.TV_CHANNEL: Capability.TV_CHANNEL, + Attribute.TV_CHANNEL_NAME: Capability.TV_CHANNEL, + Attribute.TVOC_LEVEL: Capability.TVOC_MEASUREMENT, + Attribute.ULTRAVIOLET_INDEX: Capability.ULTRAVIOLET_INDEX, + Attribute.VERY_FINE_DUST_LEVEL: Capability.VERY_FINE_DUST_SENSOR, + Attribute.VOLTAGE: Capability.VOLTAGE_MEASUREMENT, + Attribute.WASHER_MODE: Capability.WASHER_MODE, + Attribute.WASHER_JOB_STATE: Capability.WASHER_OPERATING_STATE, +} diff --git a/homeassistant/components/smartthings/entity.py b/homeassistant/components/smartthings/entity.py index 12c07bea983..5544297a4c6 100644 --- a/homeassistant/components/smartthings/entity.py +++ b/homeassistant/components/smartthings/entity.py @@ -8,9 +8,9 @@ from pysmartthings import ( Attribute, Capability, Command, + ComponentStatus, DeviceEvent, SmartThings, - Status, ) from homeassistant.helpers.device_registry import DeviceInfo @@ -38,13 +38,13 @@ class SmartThingsEntity(Entity): self.client = client self.capabilities = capabilities self.component = component - self._internal_state: dict[Capability | str, dict[Attribute | str, Status]] = { + self._internal_state: ComponentStatus = { capability: device.status[component][capability] for capability in capabilities if capability in device.status[component] } self.device = device - self._attr_unique_id = device.device.device_id + self._attr_unique_id = f"{device.device.device_id}_{component}" self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, device.device.device_id)}, ) diff --git a/homeassistant/components/smartthings/event.py b/homeassistant/components/smartthings/event.py index e22a32c7726..0439e6391f4 100644 --- a/homeassistant/components/smartthings/event.py +++ b/homeassistant/components/smartthings/event.py @@ -22,10 +22,12 @@ async def async_setup_entry( """Add events for a config entry.""" entry_data = entry.runtime_data async_add_entities( - SmartThingsButtonEvent(entry_data.client, device, component) + SmartThingsButtonEvent( + entry_data.client, device, device.device.components[component] + ) for device in entry_data.devices.values() - for component in device.device.components - if Capability.BUTTON in component.capabilities + for component, capabilities in device.status.items() + if Capability.BUTTON in capabilities ) @@ -56,5 +58,6 @@ class SmartThingsButtonEvent(SmartThingsEntity, EventEntity): ) def _update_handler(self, event: DeviceEvent) -> None: - self._trigger_event(cast(str, event.value)) - self.async_write_ha_state() + if event.attribute is Attribute.BUTTON: + self._trigger_event(cast(str, event.value)) + super()._update_handler(event) diff --git a/homeassistant/components/smartthings/icons.json b/homeassistant/components/smartthings/icons.json index 80ac70edc3f..214a9953a5a 100644 --- a/homeassistant/components/smartthings/icons.json +++ b/homeassistant/components/smartthings/icons.json @@ -1,6 +1,12 @@ { "entity": { "binary_sensor": { + "dryer_wrinkle_prevent_active": { + "default": "mdi:tumble-dryer", + "state": { + "on": "mdi:tumble-dryer-alert" + } + }, "remote_control": { "default": "mdi:remote-off", "state": { @@ -15,6 +21,9 @@ } }, "button": { + "reset_water_filter": { + 
"default": "mdi:reload" + }, "stop": { "default": "mdi:stop" } @@ -34,11 +43,20 @@ } }, "switch": { + "bubble_soak": { + "default": "mdi:water-off", + "state": { + "on": "mdi:water" + } + }, "wrinkle_prevent": { "default": "mdi:tumble-dryer", "state": { "off": "mdi:tumble-dryer-off" } + }, + "ice_maker": { + "default": "mdi:delete-variant" } } } diff --git a/homeassistant/components/smartthings/manifest.json b/homeassistant/components/smartthings/manifest.json index d7133ce7c6d..2af3e5c193b 100644 --- a/homeassistant/components/smartthings/manifest.json +++ b/homeassistant/components/smartthings/manifest.json @@ -30,5 +30,5 @@ "iot_class": "cloud_push", "loggers": ["pysmartthings"], "quality_scale": "bronze", - "requirements": ["pysmartthings==2.7.4"] + "requirements": ["pysmartthings==3.0.1"] } diff --git a/homeassistant/components/smartthings/media_player.py b/homeassistant/components/smartthings/media_player.py new file mode 100644 index 00000000000..9a676d2efb6 --- /dev/null +++ b/homeassistant/components/smartthings/media_player.py @@ -0,0 +1,355 @@ +"""Support for media players through the SmartThings cloud API.""" + +from __future__ import annotations + +from typing import Any + +from pysmartthings import Attribute, Capability, Category, Command, SmartThings + +from homeassistant.components.media_player import ( + MediaPlayerDeviceClass, + MediaPlayerEntity, + MediaPlayerEntityFeature, + MediaPlayerState, + RepeatMode, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import FullDevice, SmartThingsConfigEntry +from .const import MAIN +from .entity import SmartThingsEntity + +MEDIA_PLAYER_CAPABILITIES = ( + Capability.AUDIO_MUTE, + Capability.AUDIO_VOLUME, + Capability.MEDIA_PLAYBACK, +) + +CONTROLLABLE_SOURCES = ["bluetooth", "wifi"] + +DEVICE_CLASS_MAP: dict[Category | str, MediaPlayerDeviceClass] = { + Category.NETWORK_AUDIO: MediaPlayerDeviceClass.SPEAKER, + Category.SPEAKER: MediaPlayerDeviceClass.SPEAKER, + Category.TELEVISION: MediaPlayerDeviceClass.TV, + Category.RECEIVER: MediaPlayerDeviceClass.RECEIVER, +} + +VALUE_TO_STATE = { + "buffering": MediaPlayerState.BUFFERING, + "paused": MediaPlayerState.PAUSED, + "playing": MediaPlayerState.PLAYING, + "stopped": MediaPlayerState.IDLE, + "fast forwarding": MediaPlayerState.BUFFERING, + "rewinding": MediaPlayerState.BUFFERING, +} + +REPEAT_MODE_TO_HA = { + "all": RepeatMode.ALL, + "one": RepeatMode.ONE, + "off": RepeatMode.OFF, +} + +HA_REPEAT_MODE_TO_SMARTTHINGS = {v: k for k, v in REPEAT_MODE_TO_HA.items()} + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartThingsConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Add media players for a config entry.""" + entry_data = entry.runtime_data + + async_add_entities( + SmartThingsMediaPlayer(entry_data.client, device) + for device in entry_data.devices.values() + if all( + capability in device.status[MAIN] + for capability in MEDIA_PLAYER_CAPABILITIES + ) + ) + + +class SmartThingsMediaPlayer(SmartThingsEntity, MediaPlayerEntity): + """Define a SmartThings media player.""" + + _attr_name = None + + def __init__(self, client: SmartThings, device: FullDevice) -> None: + """Initialize the media_player class.""" + super().__init__( + client, + device, + { + Capability.AUDIO_MUTE, + Capability.AUDIO_TRACK_DATA, + Capability.AUDIO_VOLUME, + Capability.MEDIA_INPUT_SOURCE, + Capability.MEDIA_PLAYBACK, + Capability.MEDIA_PLAYBACK_REPEAT, + 
Capability.MEDIA_PLAYBACK_SHUFFLE, + Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE, + Capability.SWITCH, + }, + ) + self._attr_supported_features = self._determine_features() + self._attr_device_class = DEVICE_CLASS_MAP.get( + device.device.components[MAIN].user_category + or device.device.components[MAIN].manufacturer_category, + ) + + def _determine_features(self) -> MediaPlayerEntityFeature: + flags = MediaPlayerEntityFeature(0) + playback_commands = self.get_attribute_value( + Capability.MEDIA_PLAYBACK, Attribute.SUPPORTED_PLAYBACK_COMMANDS + ) + if "play" in playback_commands: + flags |= MediaPlayerEntityFeature.PLAY + if "pause" in playback_commands: + flags |= MediaPlayerEntityFeature.PAUSE + if "stop" in playback_commands: + flags |= MediaPlayerEntityFeature.STOP + if "rewind" in playback_commands: + flags |= MediaPlayerEntityFeature.PREVIOUS_TRACK + if "fastForward" in playback_commands: + flags |= MediaPlayerEntityFeature.NEXT_TRACK + if self.supports_capability(Capability.AUDIO_VOLUME): + flags |= ( + MediaPlayerEntityFeature.VOLUME_SET + | MediaPlayerEntityFeature.VOLUME_STEP + ) + if self.supports_capability(Capability.AUDIO_MUTE): + flags |= MediaPlayerEntityFeature.VOLUME_MUTE + if self.supports_capability(Capability.SWITCH): + flags |= ( + MediaPlayerEntityFeature.TURN_ON | MediaPlayerEntityFeature.TURN_OFF + ) + if self.supports_capability(Capability.MEDIA_INPUT_SOURCE): + flags |= MediaPlayerEntityFeature.SELECT_SOURCE + if self.supports_capability(Capability.MEDIA_PLAYBACK_SHUFFLE): + flags |= MediaPlayerEntityFeature.SHUFFLE_SET + if self.supports_capability(Capability.MEDIA_PLAYBACK_REPEAT): + flags |= MediaPlayerEntityFeature.REPEAT_SET + return flags + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the media player off.""" + await self.execute_device_command( + Capability.SWITCH, + Command.OFF, + ) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the media player on.""" + await self.execute_device_command( + Capability.SWITCH, + Command.ON, + ) + + async def async_mute_volume(self, mute: bool) -> None: + """Mute volume.""" + await self.execute_device_command( + Capability.AUDIO_MUTE, + Command.SET_MUTE, + argument="muted" if mute else "unmuted", + ) + + async def async_set_volume_level(self, volume: float) -> None: + """Set volume level.""" + await self.execute_device_command( + Capability.AUDIO_VOLUME, + Command.SET_VOLUME, + argument=int(volume * 100), + ) + + async def async_volume_up(self) -> None: + """Increase volume.""" + await self.execute_device_command( + Capability.AUDIO_VOLUME, + Command.VOLUME_UP, + ) + + async def async_volume_down(self) -> None: + """Decrease volume.""" + await self.execute_device_command( + Capability.AUDIO_VOLUME, + Command.VOLUME_DOWN, + ) + + async def async_media_play(self) -> None: + """Play media.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + Command.PLAY, + ) + + async def async_media_pause(self) -> None: + """Pause media.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + Command.PAUSE, + ) + + async def async_media_stop(self) -> None: + """Stop media.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + Command.STOP, + ) + + async def async_media_previous_track(self) -> None: + """Previous track.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + Command.REWIND, + ) + + async def async_media_next_track(self) -> None: + """Next track.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + 
Command.FAST_FORWARD, + ) + + async def async_select_source(self, source: str) -> None: + """Select source.""" + await self.execute_device_command( + Capability.MEDIA_INPUT_SOURCE, + Command.SET_INPUT_SOURCE, + argument=source, + ) + + async def async_set_shuffle(self, shuffle: bool) -> None: + """Set shuffle mode.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK_SHUFFLE, + Command.SET_PLAYBACK_SHUFFLE, + argument="enabled" if shuffle else "disabled", + ) + + async def async_set_repeat(self, repeat: RepeatMode) -> None: + """Set repeat mode.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK_REPEAT, + Command.SET_PLAYBACK_REPEAT_MODE, + argument=HA_REPEAT_MODE_TO_SMARTTHINGS[repeat], + ) + + @property + def media_title(self) -> str | None: + """Title of current playing media.""" + if ( + not self.supports_capability(Capability.AUDIO_TRACK_DATA) + or ( + track_data := self.get_attribute_value( + Capability.AUDIO_TRACK_DATA, Attribute.AUDIO_TRACK_DATA + ) + ) + is None + ): + return None + return track_data.get("title", None) + + @property + def media_artist(self) -> str | None: + """Artist of current playing media.""" + if ( + not self.supports_capability(Capability.AUDIO_TRACK_DATA) + or ( + track_data := self.get_attribute_value( + Capability.AUDIO_TRACK_DATA, Attribute.AUDIO_TRACK_DATA + ) + ) + is None + ): + return None + return track_data.get("artist") + + @property + def state(self) -> MediaPlayerState | None: + """State of the media player.""" + if self.supports_capability(Capability.SWITCH): + if self.get_attribute_value(Capability.SWITCH, Attribute.SWITCH) == "on": + if ( + self.source is not None + and self.source in CONTROLLABLE_SOURCES + and self.get_attribute_value( + Capability.MEDIA_PLAYBACK, Attribute.PLAYBACK_STATUS + ) + in VALUE_TO_STATE + ): + return VALUE_TO_STATE[ + self.get_attribute_value( + Capability.MEDIA_PLAYBACK, Attribute.PLAYBACK_STATUS + ) + ] + return MediaPlayerState.ON + return MediaPlayerState.OFF + return VALUE_TO_STATE[ + self.get_attribute_value( + Capability.MEDIA_PLAYBACK, Attribute.PLAYBACK_STATUS + ) + ] + + @property + def is_volume_muted(self) -> bool: + """Returns if the volume is muted.""" + return ( + self.get_attribute_value(Capability.AUDIO_MUTE, Attribute.MUTE) == "muted" + ) + + @property + def volume_level(self) -> float: + """Volume level.""" + return self.get_attribute_value(Capability.AUDIO_VOLUME, Attribute.VOLUME) / 100 + + @property + def source(self) -> str | None: + """Input source.""" + if self.supports_capability(Capability.MEDIA_INPUT_SOURCE): + return self.get_attribute_value( + Capability.MEDIA_INPUT_SOURCE, Attribute.INPUT_SOURCE + ) + if self.supports_capability(Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE): + return self.get_attribute_value( + Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE, Attribute.INPUT_SOURCE + ) + return None + + @property + def source_list(self) -> list[str] | None: + """List of input sources.""" + if self.supports_capability(Capability.MEDIA_INPUT_SOURCE): + return self.get_attribute_value( + Capability.MEDIA_INPUT_SOURCE, Attribute.SUPPORTED_INPUT_SOURCES + ) + if self.supports_capability(Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE): + return self.get_attribute_value( + Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE, + Attribute.SUPPORTED_INPUT_SOURCES, + ) + return None + + @property + def shuffle(self) -> bool | None: + """Returns if shuffle mode is set.""" + if self.supports_capability(Capability.MEDIA_PLAYBACK_SHUFFLE): + return ( + self.get_attribute_value( + 
Capability.MEDIA_PLAYBACK_SHUFFLE, Attribute.PLAYBACK_SHUFFLE + ) + == "enabled" + ) + return None + + @property + def repeat(self) -> RepeatMode | None: + """Returns if repeat mode is set.""" + if self.supports_capability(Capability.MEDIA_PLAYBACK_REPEAT): + return REPEAT_MODE_TO_HA[ + self.get_attribute_value( + Capability.MEDIA_PLAYBACK_REPEAT, Attribute.PLAYBACK_REPEAT_MODE + ) + ] + return None diff --git a/homeassistant/components/smartthings/number.py b/homeassistant/components/smartthings/number.py index cbd200e20b6..2f2ac7903f2 100644 --- a/homeassistant/components/smartthings/number.py +++ b/homeassistant/components/smartthings/number.py @@ -4,7 +4,7 @@ from __future__ import annotations from pysmartthings import Attribute, Capability, Command, SmartThings -from homeassistant.components.number import NumberEntity +from homeassistant.components.number import NumberEntity, NumberMode from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback @@ -32,13 +32,12 @@ class SmartThingsWasherRinseCyclesNumberEntity(SmartThingsEntity, NumberEntity): _attr_translation_key = "washer_rinse_cycles" _attr_native_step = 1.0 + _attr_mode = NumberMode.BOX def __init__(self, client: SmartThings, device: FullDevice) -> None: """Initialize the instance.""" super().__init__(client, device, {Capability.CUSTOM_WASHER_RINSE_CYCLES}) - self._attr_unique_id = ( - f"{device.device.device_id}_{MAIN}_{Capability.CUSTOM_WASHER_RINSE_CYCLES}" - ) + self._attr_unique_id = f"{device.device.device_id}_{MAIN}_{Capability.CUSTOM_WASHER_RINSE_CYCLES}_{Attribute.WASHER_RINSE_CYCLES}_{Attribute.WASHER_RINSE_CYCLES}" @property def options(self) -> list[int]: diff --git a/homeassistant/components/smartthings/select.py b/homeassistant/components/smartthings/select.py index 6011b7947b7..f0a483b1329 100644 --- a/homeassistant/components/smartthings/select.py +++ b/homeassistant/components/smartthings/select.py @@ -28,6 +28,15 @@ class SmartThingsSelectDescription(SelectEntityDescription): CAPABILITIES_TO_SELECT: dict[Capability | str, SmartThingsSelectDescription] = { + Capability.DISHWASHER_OPERATING_STATE: SmartThingsSelectDescription( + key=Capability.DISHWASHER_OPERATING_STATE, + name=None, + translation_key="operating_state", + requires_remote_control_status=True, + options_attribute=Attribute.SUPPORTED_MACHINE_STATES, + status_attribute=Attribute.MACHINE_STATE, + command=Command.SET_MACHINE_STATE, + ), Capability.DRYER_OPERATING_STATE: SmartThingsSelectDescription( key=Capability.DRYER_OPERATING_STATE, name=None, @@ -83,9 +92,7 @@ class SmartThingsSelectEntity(SmartThingsEntity, SelectEntity): capabilities.add(Capability.REMOTE_CONTROL_STATUS) super().__init__(client, device, capabilities) self.entity_description = entity_description - self._attr_unique_id = ( - f"{device.device.device_id}_{MAIN}_{entity_description.key}" - ) + self._attr_unique_id = f"{device.device.device_id}_{MAIN}_{entity_description.key}_{entity_description.status_attribute}_{entity_description.status_attribute}" @property def options(self) -> list[str]: diff --git a/homeassistant/components/smartthings/sensor.py b/homeassistant/components/smartthings/sensor.py index ee8550e4f06..424483d9617 100644 --- a/homeassistant/components/smartthings/sensor.py +++ b/homeassistant/components/smartthings/sensor.py @@ -7,8 +7,10 @@ from dataclasses import dataclass from datetime import datetime from typing import Any, cast -from pysmartthings import Attribute, Capability, SmartThings, Status 
+from pysmartthings import Attribute, Capability, ComponentStatus, SmartThings, Status +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -29,11 +31,17 @@ from homeassistant.const import ( UnitOfVolume, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from homeassistant.util import dt as dt_util from . import FullDevice, SmartThingsConfigEntry -from .const import MAIN +from .const import DOMAIN, MAIN from .entity import SmartThingsEntity THERMOSTAT_CAPABILITIES = { @@ -128,11 +136,11 @@ class SmartThingsSensorEntityDescription(SensorEntityDescription): value_fn: Callable[[Any], str | float | int | datetime | None] = lambda value: value extra_state_attributes_fn: Callable[[Any], dict[str, Any]] | None = None - unique_id_separator: str = "." capability_ignore_list: list[set[Capability]] | None = None options_attribute: Attribute | None = None exists_fn: Callable[[Status], bool] | None = None use_temperature_unit: bool = False + deprecated: Callable[[ComponentStatus], str | None] | None = None CAPABILITY_TO_SENSORS: dict[ @@ -189,6 +197,17 @@ CAPABILITY_TO_SENSORS: dict[ key=Attribute.VOLUME, translation_key="audio_volume", native_unit_of_measurement=PERCENTAGE, + deprecated=( + lambda status: "media_player" + if all( + capability in status + for capability in ( + Capability.AUDIO_MUTE, + Capability.MEDIA_PLAYBACK, + ) + ) + else None + ), ) ] }, @@ -463,6 +482,7 @@ CAPABILITY_TO_SENSORS: dict[ device_class=SensorDeviceClass.ENUM, options_attribute=Attribute.SUPPORTED_INPUT_SOURCES, value_fn=lambda value: value.lower() if value else None, + deprecated=lambda _: "media_player", ) ] }, @@ -471,6 +491,7 @@ CAPABILITY_TO_SENSORS: dict[ SmartThingsSensorEntityDescription( key=Attribute.PLAYBACK_REPEAT_MODE, translation_key="media_playback_repeat", + deprecated=lambda _: "media_player", ) ] }, @@ -479,6 +500,7 @@ CAPABILITY_TO_SENSORS: dict[ SmartThingsSensorEntityDescription( key=Attribute.PLAYBACK_SHUFFLE, translation_key="media_playback_shuffle", + deprecated=lambda _: "media_player", ) ] }, @@ -497,6 +519,7 @@ CAPABILITY_TO_SENSORS: dict[ ], device_class=SensorDeviceClass.ENUM, value_fn=lambda value: MEDIA_PLAYBACK_STATE_MAP.get(value, value), + deprecated=lambda _: "media_player", ) ] }, @@ -675,6 +698,15 @@ CAPABILITY_TO_SENSORS: dict[ ) ] }, + Capability.RELATIVE_BRIGHTNESS: { + Attribute.BRIGHTNESS_INTENSITY: [ + SmartThingsSensorEntityDescription( + key=Attribute.BRIGHTNESS_INTENSITY, + translation_key="brightness_intensity", + state_class=SensorStateClass.MEASUREMENT, + ) + ] + }, Capability.RELATIVE_HUMIDITY_MEASUREMENT: { Attribute.HUMIDITY: [ SmartThingsSensorEntityDescription( @@ -846,21 +878,18 @@ CAPABILITY_TO_SENSORS: dict[ Capability.THREE_AXIS: { Attribute.THREE_AXIS: [ SmartThingsSensorEntityDescription( - key="X Coordinate", + key="x_coordinate", translation_key="x_coordinate", - unique_id_separator=" ", value_fn=lambda value: value[0], ), SmartThingsSensorEntityDescription( - key="Y Coordinate", + key="y_coordinate", translation_key="y_coordinate", - unique_id_separator=" ", value_fn=lambda value: value[1], ), SmartThingsSensorEntityDescription( - key="Z 
Coordinate", + key="z_coordinate", translation_key="z_coordinate", - unique_id_separator=" ", value_fn=lambda value: value[2], ), ] @@ -1037,7 +1066,7 @@ class SmartThingsSensor(SmartThingsEntity, SensorEntity): if entity_description.use_temperature_unit: capabilities_to_subscribe.add(Capability.TEMPERATURE_MEASUREMENT) super().__init__(client, device, capabilities_to_subscribe) - self._attr_unique_id = f"{device.device.device_id}{entity_description.unique_id_separator}{entity_description.key}" + self._attr_unique_id = f"{device.device.device_id}_{MAIN}_{capability}_{attribute}_{entity_description.key}" self._attribute = attribute self.capability = capability self.entity_description = entity_description @@ -1084,3 +1113,53 @@ class SmartThingsSensor(SmartThingsEntity, SensorEntity): return [] return [option.lower() for option in options] return super().options + + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + await super().async_added_to_hass() + if ( + not self.entity_description.deprecated + or (reason := self.entity_description.deprecated(self.device.status[MAIN])) + is None + ): + return + automations = automations_with_entity(self.hass, self.entity_id) + scripts = scripts_with_entity(self.hass, self.entity_id) + if not automations and not scripts: + return + + entity_reg: er.EntityRegistry = er.async_get(self.hass) + items_list = [ + f"- [{item.original_name}](/config/{integration}/edit/{item.unique_id})" + for integration, entities in ( + ("automation", automations), + ("script", scripts), + ) + for entity_id in entities + if (item := entity_reg.async_get(entity_id)) + ] + + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_{reason}_{self.entity_id}", + breaks_in_ha_version="2025.10.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key=f"deprecated_{reason}", + translation_placeholders={ + "entity": self.entity_id, + "items": "\n".join(items_list), + }, + ) + + async def async_will_remove_from_hass(self) -> None: + """Call when entity will be removed from hass.""" + await super().async_will_remove_from_hass() + if ( + not self.entity_description.deprecated + or (reason := self.entity_description.deprecated(self.device.status[MAIN])) + is None + ): + return + async_delete_issue(self.hass, DOMAIN, f"deprecated_{reason}_{self.entity_id}") diff --git a/homeassistant/components/smartthings/strings.json b/homeassistant/components/smartthings/strings.json index 25872dca82c..fc3ca66a3af 100644 --- a/homeassistant/components/smartthings/strings.json +++ b/homeassistant/components/smartthings/strings.json @@ -36,6 +36,9 @@ "door": { "name": "[%key:component::binary_sensor::entity_component::door::name%]" }, + "dryer_wrinkle_prevent_active": { + "name": "Wrinkle prevent active" + }, "filter_status": { "name": "Filter status" }, @@ -56,6 +59,9 @@ } }, "button": { + "reset_water_filter": { + "name": "Reset water filter" + }, "stop": { "name": "[%key:common::action::stop%]" } @@ -103,7 +109,7 @@ "state": { "run": "[%key:component::smartthings::entity::sensor::dishwasher_machine_state::state::run%]", "pause": "[%key:common::state::paused%]", - "stop": "[%key:component::smartthings::entity::sensor::dishwasher_machine_state::state::stop%]" + "stop": "[%key:common::state::stopped%]" } } }, @@ -148,7 +154,7 @@ "state": { "pause": "[%key:common::state::paused%]", "run": "Running", - "stop": "Stopped" + "stop": "[%key:common::state::stopped%]" } }, "dishwasher_job_state": { @@ -177,7 +183,7 @@ "state": { "pause": 
"[%key:common::state::paused%]", "run": "[%key:component::smartthings::entity::sensor::dishwasher_machine_state::state::run%]", - "stop": "[%key:component::smartthings::entity::sensor::dishwasher_machine_state::state::stop%]" + "stop": "[%key:common::state::stopped%]" } }, "dryer_job_state": { @@ -342,6 +348,9 @@ "refrigeration_setpoint": { "name": "[%key:component::smartthings::entity::sensor::oven_setpoint::name%]" }, + "brightness_intensity": { + "name": "Brightness intensity" + }, "robot_cleaner_cleaning_mode": { "name": "Cleaning mode", "state": { @@ -432,7 +441,7 @@ "state": { "pause": "[%key:common::state::paused%]", "run": "[%key:component::smartthings::entity::sensor::dishwasher_machine_state::state::run%]", - "stop": "[%key:component::smartthings::entity::sensor::dishwasher_machine_state::state::stop%]" + "stop": "[%key:common::state::stopped%]" } }, "washer_job_state": { @@ -458,15 +467,37 @@ } }, "switch": { + "bubble_soak": { + "name": "Bubble Soak" + }, "wrinkle_prevent": { "name": "Wrinkle prevent" + }, + "ice_maker": { + "name": "Ice maker" } } }, "issues": { "deprecated_binary_valve": { "title": "Deprecated valve binary sensor detected in some automations or scripts", - "description": "The valve binary sensor `{entity}` is deprecated and is used in the following automations or scripts:\n{items}\n\nA valve entity with controls is available and should be used going forward; Please use it on the above automations or scripts to fix this issue." + "description": "The valve binary sensor `{entity}` is deprecated and is used in the following automations or scripts:\n{items}\n\nA valve entity with controls is available and should be used going forward. Please use the new valve entity in the above automations or scripts to fix this issue." + }, + "deprecated_binary_fridge_door": { + "title": "Deprecated refrigerator door binary sensor detected in some automations or scripts", + "description": "The refrigerator door binary sensor `{entity}` is deprecated and is used in the following automations or scripts:\n{items}\n\nSeparate entities for cooler and freezer door are available and should be used going forward. Please use them in the above automations or scripts to fix this issue." + }, + "deprecated_switch_appliance": { + "title": "Deprecated switch detected in some automations or scripts", + "description": "The switch `{entity}` is deprecated because the actions did not work, so it has been replaced with a binary sensor instead.\n\nThe switch was used in the following automations or scripts:\n{items}\n\nPlease use the new binary sensor in the above automations or scripts to fix this issue." + }, + "deprecated_switch_media_player": { + "title": "[%key:component::smartthings::issues::deprecated_switch_appliance::title%]", + "description": "The switch `{entity}` is deprecated and a media player entity has been added to replace it.\n\nThe switch was used in the following automations or scripts:\n{items}\n\nPlease use the new media player entity in the above automations or scripts to fix this issue." + }, + "deprecated_media_player": { + "title": "Deprecated sensor detected in some automations or scripts", + "description": "The sensor `{entity}` is deprecated because it has been replaced with a media player entity.\n\nThe sensor was used in the following automations or scripts:\n{items}\n\nPlease use the new media player entity in the above automations or scripts to fix this issue." 
} } } diff --git a/homeassistant/components/smartthings/switch.py b/homeassistant/components/smartthings/switch.py index 6e0dc1ac93d..e5b74de3241 100644 --- a/homeassistant/components/smartthings/switch.py +++ b/homeassistant/components/smartthings/switch.py @@ -5,14 +5,22 @@ from __future__ import annotations from dataclasses import dataclass from typing import Any -from pysmartthings import Attribute, Capability, Command, SmartThings +from pysmartthings import Attribute, Capability, Category, Command, SmartThings +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from . import FullDevice, SmartThingsConfigEntry -from .const import MAIN +from .const import DOMAIN, MAIN from .entity import SmartThingsEntity CAPABILITIES = ( @@ -35,6 +43,7 @@ class SmartThingsSwitchEntityDescription(SwitchEntityDescription): """Describe a SmartThings switch entity.""" status_attribute: Attribute + component_translation_key: dict[str, str] | None = None @dataclass(frozen=True, kw_only=True) @@ -59,6 +68,20 @@ CAPABILITY_TO_COMMAND_SWITCHES: dict[ command=Command.SET_DRYER_WRINKLE_PREVENT, ) } +CAPABILITY_TO_SWITCHES: dict[Capability | str, SmartThingsSwitchEntityDescription] = { + Capability.SAMSUNG_CE_WASHER_BUBBLE_SOAK: SmartThingsSwitchEntityDescription( + key=Capability.SAMSUNG_CE_WASHER_BUBBLE_SOAK, + translation_key="bubble_soak", + status_attribute=Attribute.STATUS, + ), + Capability.SWITCH: SmartThingsSwitchEntityDescription( + key=Capability.SWITCH, + status_attribute=Attribute.SWITCH, + component_translation_key={ + "icemaker": "ice_maker", + }, + ), +} async def async_setup_entry( @@ -86,6 +109,26 @@ async def async_setup_entry( for capability, description in CAPABILITY_TO_COMMAND_SWITCHES.items() if capability in device.status[MAIN] ) + entities.extend( + SmartThingsSwitch( + entry_data.client, + device, + description, + Capability(capability), + component, + ) + for device in entry_data.devices.values() + for capability, description in CAPABILITY_TO_SWITCHES.items() + for component in device.status + if capability in device.status[component] + and ( + (description.component_translation_key is None and component == MAIN) + or ( + description.component_translation_key is not None + and component in description.component_translation_key + ) + ) + ) async_add_entities(entities) @@ -93,6 +136,7 @@ class SmartThingsSwitch(SmartThingsEntity, SwitchEntity): """Define a SmartThings switch.""" entity_description: SmartThingsSwitchEntityDescription + created_issue: bool = False def __init__( self, @@ -100,14 +144,19 @@ class SmartThingsSwitch(SmartThingsEntity, SwitchEntity): device: FullDevice, entity_description: SmartThingsSwitchEntityDescription, capability: Capability, + component: str = MAIN, ) -> None: """Initialize the switch.""" - super().__init__(client, device, {capability}) + super().__init__(client, device, {capability}, component=component) self.entity_description = entity_description self.switch_capability = capability - self._attr_unique_id = device.device.device_id - if capability is not Capability.SWITCH: - self._attr_unique_id = 
f"{device.device.device_id}_{MAIN}_{capability}" + self._attr_unique_id = f"{device.device.device_id}_{component}_{capability}_{entity_description.status_attribute}_{entity_description.status_attribute}" + if ( + translation_keys := entity_description.component_translation_key + ) is not None and ( + translation_key := translation_keys.get(component) + ) is not None: + self._attr_translation_key = translation_key async def async_turn_off(self, **kwargs: Any) -> None: """Turn the switch off.""" @@ -133,6 +182,70 @@ class SmartThingsSwitch(SmartThingsEntity, SwitchEntity): == "on" ) + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + await super().async_added_to_hass() + media_player = all( + capability in self.device.status[MAIN] + for capability in ( + Capability.AUDIO_MUTE, + Capability.AUDIO_VOLUME, + Capability.MEDIA_PLAYBACK, + ) + ) + if ( + self.entity_description != SWITCH + and self.device.device.components[MAIN].manufacturer_category + not in { + Category.CLOTHING_CARE_MACHINE, + Category.COOKTOP, + Category.DRYER, + Category.WASHER, + Category.MICROWAVE, + Category.DISHWASHER, + } + ) or (self.entity_description != SWITCH and not media_player): + return + automations = automations_with_entity(self.hass, self.entity_id) + scripts = scripts_with_entity(self.hass, self.entity_id) + if not automations and not scripts: + return + + entity_reg: er.EntityRegistry = er.async_get(self.hass) + items_list = [ + f"- [{item.original_name}](/config/{integration}/edit/{item.unique_id})" + for integration, entities in ( + ("automation", automations), + ("script", scripts), + ) + for entity_id in entities + if (item := entity_reg.async_get(entity_id)) + ] + + identifier = "media_player" if media_player else "appliance" + + self.created_issue = True + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_switch_{self.entity_id}", + breaks_in_ha_version="2025.10.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key=f"deprecated_switch_{identifier}", + translation_placeholders={ + "entity": self.entity_id, + "items": "\n".join(items_list), + }, + ) + + async def async_will_remove_from_hass(self) -> None: + """Call when entity will be removed from hass.""" + await super().async_will_remove_from_hass() + if not self.created_issue: + return + async_delete_issue(self.hass, DOMAIN, f"deprecated_switch_{self.entity_id}") + class SmartThingsCommandSwitch(SmartThingsSwitch): """Define a SmartThings command switch.""" diff --git a/homeassistant/components/smartthings/valve.py b/homeassistant/components/smartthings/valve.py index 3c401c087ec..4279d528f8b 100644 --- a/homeassistant/components/smartthings/valve.py +++ b/homeassistant/components/smartthings/valve.py @@ -47,8 +47,8 @@ class SmartThingsValve(SmartThingsEntity, ValveEntity): """Init the class.""" super().__init__(client, device, {Capability.VALVE}) self._attr_device_class = DEVICE_CLASS_MAP.get( - device.device.components[0].user_category - or device.device.components[0].manufacturer_category + device.device.components[MAIN].user_category + or device.device.components[MAIN].manufacturer_category ) async def async_open_valve(self) -> None: diff --git a/homeassistant/components/smarty/config_flow.py b/homeassistant/components/smarty/config_flow.py index 9a55356a990..a7f0bdd4123 100644 --- a/homeassistant/components/smarty/config_flow.py +++ b/homeassistant/components/smarty/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Smarty integration.""" +import logging from typing import Any 
from pysmarty2 import Smarty @@ -10,6 +11,8 @@ from homeassistant.const import CONF_HOST, CONF_NAME from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + class SmartyConfigFlow(ConfigFlow, domain=DOMAIN): """Smarty config flow.""" @@ -20,7 +23,8 @@ class SmartyConfigFlow(ConfigFlow, domain=DOMAIN): try: if smarty.update(): return None - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") return "unknown" else: return "cannot_connect" diff --git a/homeassistant/components/smlight/__init__.py b/homeassistant/components/smlight/__init__.py index 8f3e675ef6b..b3a6860e5b7 100644 --- a/homeassistant/components/smlight/__init__.py +++ b/homeassistant/components/smlight/__init__.py @@ -2,7 +2,7 @@ from __future__ import annotations -from pysmlight import Api2, Info, Radio +from pysmlight import Api2 from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant @@ -50,9 +50,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: SmConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -def get_radio(info: Info, idx: int) -> Radio: - """Get the radio object from the info.""" - assert info.radios is not None - return info.radios[idx] diff --git a/homeassistant/components/smlight/config_flow.py b/homeassistant/components/smlight/config_flow.py index fcfc364d983..ce4f8f43233 100644 --- a/homeassistant/components/smlight/config_flow.py +++ b/homeassistant/components/smlight/config_flow.py @@ -51,14 +51,14 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): self.client = Api2(self._host, session=async_get_clientsession(self.hass)) try: - info = await self.client.get_info() - self._host = str(info.device_ip) - self._device_name = str(info.hostname) - - if info.model not in Devices: - return self.async_abort(reason="unsupported_device") - if not await self._async_check_auth_required(user_input): + info = await self.client.get_info() + self._host = str(info.device_ip) + self._device_name = str(info.hostname) + + if info.model not in Devices: + return self.async_abort(reason="unsupported_device") + return await self._async_complete_entry(user_input) except SmlightConnectionError: errors["base"] = "cannot_connect" @@ -128,13 +128,13 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: try: - info = await self.client.get_info() - - if info.model not in Devices: - return self.async_abort(reason="unsupported_device") - if not await self._async_check_auth_required(user_input): - return await self._async_complete_entry(user_input) + info = await self.client.get_info() + + if info.model not in Devices: + return self.async_abort(reason="unsupported_device") + + return await self._async_complete_entry(user_input) except SmlightConnectionError: return self.async_abort(reason="cannot_connect") diff --git a/homeassistant/components/smlight/manifest.json b/homeassistant/components/smlight/manifest.json index 3f527d1fcd9..e9025203b8c 100644 --- a/homeassistant/components/smlight/manifest.json +++ b/homeassistant/components/smlight/manifest.json @@ -11,7 +11,7 @@ "documentation": "https://www.home-assistant.io/integrations/smlight", "integration_type": "device", "iot_class": "local_push", - "requirements": ["pysmlight==0.2.3"], + "requirements": ["pysmlight==0.2.4"], "zeroconf": [ { "type": "_slzb-06._tcp.local." 
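A minimal, self-contained sketch of the index-aware value_fn pattern that the smlight sensor changes below rely on: a single description type whose callable receives both the Info payload and a radio index, so device-wide info sensors and per-radio Zigbee sensors can share the same machinery. This is an illustration only; Radio and Info here are simplified stand-ins for pysmlight's models, and the attribute names are assumptions rather than the library's real API.

from collections.abc import Callable
from dataclasses import dataclass


@dataclass
class Radio:
    # Simplified stand-in for a pysmlight radio entry (assumption for illustration).
    zb_type: str


@dataclass
class Info:
    # Simplified stand-in for pysmlight's Info object (assumption for illustration).
    coord_mode: str
    radios: list[Radio]


@dataclass
class SmInfoDescription:
    # Every description takes (info, idx); device-wide ones simply ignore idx.
    key: str
    value_fn: Callable[[Info, int], str]


DEVICE_MODE = SmInfoDescription(
    key="device_mode",
    value_fn=lambda info, idx: info.coord_mode,  # idx unused for device-wide info
)
RADIO_TYPE = SmInfoDescription(
    key="zigbee_type",
    value_fn=lambda info, idx: info.radios[idx].zb_type,  # idx selects the radio
)

info = Info(coord_mode="eth", radios=[Radio("coordinator"), Radio("router")])
print(DEVICE_MODE.value_fn(info, 0))                                    # eth
print([RADIO_TYPE.value_fn(info, i) for i in range(len(info.radios))])  # ['coordinator', 'router']

One such description can then be instantiated once per entry in info.radios at setup time, mirroring how the coordinator data is enumerated in the sensor diff that follows.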
diff --git a/homeassistant/components/smlight/sensor.py b/homeassistant/components/smlight/sensor.py index 57a08d177d4..2f57843b5eb 100644 --- a/homeassistant/components/smlight/sensor.py +++ b/homeassistant/components/smlight/sensor.py @@ -37,7 +37,7 @@ class SmSensorEntityDescription(SensorEntityDescription): class SmInfoEntityDescription(SensorEntityDescription): """Class describing SMLIGHT information entities.""" - value_fn: Callable[[Info], StateType] + value_fn: Callable[[Info, int], StateType] INFO: list[SmInfoEntityDescription] = [ @@ -46,24 +46,25 @@ INFO: list[SmInfoEntityDescription] = [ translation_key="device_mode", device_class=SensorDeviceClass.ENUM, options=["eth", "wifi", "usb"], - value_fn=lambda x: x.coord_mode, + value_fn=lambda x, idx: x.coord_mode, ), SmInfoEntityDescription( key="firmware_channel", translation_key="firmware_channel", device_class=SensorDeviceClass.ENUM, options=["dev", "release"], - value_fn=lambda x: x.fw_channel, - ), - SmInfoEntityDescription( - key="zigbee_type", - translation_key="zigbee_type", - device_class=SensorDeviceClass.ENUM, - options=["coordinator", "router", "thread"], - value_fn=lambda x: x.zb_type, + value_fn=lambda x, idx: x.fw_channel, ), ] +RADIO_INFO = SmInfoEntityDescription( + key="zigbee_type", + translation_key="zigbee_type", + device_class=SensorDeviceClass.ENUM, + options=["coordinator", "router", "thread"], + value_fn=lambda x, idx: x.radios[idx].zb_type, +) + SENSORS: list[SmSensorEntityDescription] = [ SmSensorEntityDescription( @@ -102,6 +103,16 @@ SENSORS: list[SmSensorEntityDescription] = [ ), ] +EXTRA_SENSOR = SmSensorEntityDescription( + key="zigbee_temperature_2", + translation_key="zigbee_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda x: x.zb_temp2, +) + UPTIME: list[SmSensorEntityDescription] = [ SmSensorEntityDescription( key="core_uptime", @@ -127,8 +138,7 @@ async def async_setup_entry( ) -> None: """Set up SMLIGHT sensor based on a config entry.""" coordinator = entry.runtime_data.data - - async_add_entities( + entities: list[SmEntity] = list( chain( (SmInfoSensorEntity(coordinator, description) for description in INFO), (SmSensorEntity(coordinator, description) for description in SENSORS), @@ -136,6 +146,16 @@ async def async_setup_entry( ) ) + entities.extend( + SmInfoSensorEntity(coordinator, RADIO_INFO, idx) + for idx, _ in enumerate(coordinator.data.info.radios) + ) + + if coordinator.data.sensors.zb_temp2 is not None: + entities.append(SmSensorEntity(coordinator, EXTRA_SENSOR)) + + async_add_entities(entities) + class SmSensorEntity(SmEntity, SensorEntity): """Representation of a slzb sensor.""" @@ -172,17 +192,20 @@ class SmInfoSensorEntity(SmEntity, SensorEntity): self, coordinator: SmDataUpdateCoordinator, description: SmInfoEntityDescription, + idx: int = 0, ) -> None: """Initiate slzb sensor.""" super().__init__(coordinator) self.entity_description = description - self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" + self.idx = idx + sensor = f"_{idx}" if idx else "" + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}{sensor}" @property def native_value(self) -> StateType: """Return the sensor value.""" - value = self.entity_description.value_fn(self.coordinator.data.info) + value = self.entity_description.value_fn(self.coordinator.data.info, self.idx) options = self.entity_description.options if 
isinstance(value, int) and options is not None: diff --git a/homeassistant/components/smlight/update.py b/homeassistant/components/smlight/update.py index 10d142e6221..3143f2f4290 100644 --- a/homeassistant/components/smlight/update.py +++ b/homeassistant/components/smlight/update.py @@ -22,7 +22,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from . import get_radio from .const import LOGGER from .coordinator import SmConfigEntry, SmFirmwareUpdateCoordinator, SmFwData from .entity import SmEntity @@ -56,7 +55,7 @@ CORE_UPDATE_ENTITY = SmUpdateEntityDescription( ZB_UPDATE_ENTITY = SmUpdateEntityDescription( key="zigbee_update", translation_key="zigbee_update", - installed_version=lambda x, idx: get_radio(x, idx).zb_version, + installed_version=lambda x, idx: x.radios[idx].zb_version, latest_version=zigbee_latest_version, ) @@ -75,7 +74,6 @@ async def async_setup_entry( entities = [SmUpdateEntity(coordinator, CORE_UPDATE_ENTITY)] radios = coordinator.data.info.radios - assert radios is not None entities.extend( SmUpdateEntity(coordinator, ZB_UPDATE_ENTITY, idx) diff --git a/homeassistant/components/snoo/event.py b/homeassistant/components/snoo/event.py index 5932bfd9862..1e50ee46d90 100644 --- a/homeassistant/components/snoo/event.py +++ b/homeassistant/components/snoo/event.py @@ -31,6 +31,7 @@ async def async_setup_entry( "power", "status_requested", "sticky_white_noise_updated", + "config_change", ], ), ) diff --git a/homeassistant/components/snoo/manifest.json b/homeassistant/components/snoo/manifest.json index 4084a7e3e79..839382b2d84 100644 --- a/homeassistant/components/snoo/manifest.json +++ b/homeassistant/components/snoo/manifest.json @@ -7,5 +7,5 @@ "iot_class": "cloud_push", "loggers": ["snoo"], "quality_scale": "bronze", - "requirements": ["python-snoo==0.6.4"] + "requirements": ["python-snoo==0.6.5"] } diff --git a/homeassistant/components/snoo/strings.json b/homeassistant/components/snoo/strings.json index f7cf6a4820b..1c86c066c7f 100644 --- a/homeassistant/components/snoo/strings.json +++ b/homeassistant/components/snoo/strings.json @@ -55,7 +55,8 @@ "activity": "Activity press", "power": "Power button pressed", "status_requested": "Status requested", - "sticky_white_noise_updated": "Sleepytime sounds updated" + "sticky_white_noise_updated": "Sleepytime sounds updated", + "config_change": "Config changed" } } } @@ -70,7 +71,7 @@ "level2": "Level 2", "level3": "Level 3", "level4": "Level 4", - "stop": "Stopped", + "stop": "[%key:common::state::stopped%]", "pretimeout": "Pre-timeout", "timeout": "Timeout" } @@ -88,7 +89,7 @@ "level2": "[%key:component::snoo::entity::sensor::state::state::level2%]", "level3": "[%key:component::snoo::entity::sensor::state::state::level3%]", "level4": "[%key:component::snoo::entity::sensor::state::state::level4%]", - "stop": "[%key:component::snoo::entity::sensor::state::state::stop%]" + "stop": "[%key:common::state::stopped%]" } } }, diff --git a/homeassistant/components/solaredge/strings.json b/homeassistant/components/solaredge/strings.json index 2b626987546..105a9282a6d 100644 --- a/homeassistant/components/solaredge/strings.json +++ b/homeassistant/components/solaredge/strings.json @@ -5,7 +5,7 @@ "title": "Define the API parameters for this installation", "data": { "name": "The name of this installation", - "site_id": "The SolarEdge site-id", + "site_id": "The SolarEdge site ID", "api_key": 
"[%key:common::config_flow::data::api_key%]" } } @@ -14,7 +14,7 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]", "site_not_active": "The site is not active", - "could_not_connect": "Could not connect to the solaredge API" + "could_not_connect": "Could not connect to the SolarEdge API" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" @@ -65,7 +65,7 @@ "name": "Grid power" }, "storage_power": { - "name": "Stored power" + "name": "Storage power" }, "purchased_energy": { "name": "Imported energy" diff --git a/homeassistant/components/spotify/config_flow.py b/homeassistant/components/spotify/config_flow.py index d99fa7793df..3478887d64c 100644 --- a/homeassistant/components/spotify/config_flow.py +++ b/homeassistant/components/spotify/config_flow.py @@ -41,7 +41,8 @@ class SpotifyFlowHandler( try: current_user = await spotify.get_current_user() - except Exception: # noqa: BLE001 + except Exception: + self.logger.exception("Error while connecting to Spotify") return self.async_abort(reason="connection_error") name = current_user.display_name diff --git a/homeassistant/components/spotify/strings.json b/homeassistant/components/spotify/strings.json index 90e573a1706..66d837c503f 100644 --- a/homeassistant/components/spotify/strings.json +++ b/homeassistant/components/spotify/strings.json @@ -13,7 +13,7 @@ "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", "missing_configuration": "The Spotify integration is not configured. Please follow the documentation.", "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", - "reauth_account_mismatch": "The Spotify account authenticated with, does not match the account needed re-authentication.", + "reauth_account_mismatch": "The Spotify account authenticated with does not match the account that needed re-authentication.", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", diff --git a/homeassistant/components/squeezebox/config_flow.py b/homeassistant/components/squeezebox/config_flow.py index 2853ad14217..31dd5b003b7 100644 --- a/homeassistant/components/squeezebox/config_flow.py +++ b/homeassistant/components/squeezebox/config_flow.py @@ -151,7 +151,8 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN): if server.http_status == HTTPStatus.UNAUTHORIZED: return "invalid_auth" return "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unknown exception while validating connection") return "unknown" if "uuid" in status: diff --git a/homeassistant/components/sunweg/__init__.py b/homeassistant/components/sunweg/__init__.py index 86da0a247b1..0dfed0e6bb3 100644 --- a/homeassistant/components/sunweg/__init__.py +++ b/homeassistant/components/sunweg/__init__.py @@ -1,197 +1,39 @@ """The Sun WEG inverter sensor integration.""" -import datetime -import json -import logging - -from sunweg.api import APIHelper -from sunweg.plant import Plant - -from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed -from 
homeassistant.helpers.typing import StateType, UndefinedType -from homeassistant.util import Throttle +from homeassistant.helpers import issue_registry as ir -from .const import CONF_PLANT_ID, DOMAIN, PLATFORMS, DeviceType - -SCAN_INTERVAL = datetime.timedelta(minutes=5) - -_LOGGER = logging.getLogger(__name__) +DOMAIN = "sunweg" -async def async_setup_entry( - hass: HomeAssistant, entry: config_entries.ConfigEntry -) -> bool: +async def async_setup_entry(hass: HomeAssistant, _: ConfigEntry) -> bool: """Load the saved entities.""" - api = APIHelper(entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD]) - if not await hass.async_add_executor_job(api.authenticate): - raise ConfigEntryAuthFailed("Username or Password may be incorrect!") - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = SunWEGData( - api, entry.data[CONF_PLANT_ID] + ir.async_create_issue( + hass, + DOMAIN, + DOMAIN, + is_fixable=False, + severity=ir.IssueSeverity.ERROR, + translation_key="integration_removed", + translation_placeholders={ + "issue": "https://github.com/rokam/sunweg/issues/13", + "entries": "/config/integrations/integration/sunweg", + }, ) - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - hass.data[DOMAIN].pop(entry.entry_id) - if len(hass.data[DOMAIN]) == 0: - hass.data.pop(DOMAIN) - return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + return True -class SunWEGData: - """The class for handling data retrieval.""" - - def __init__( - self, - api: APIHelper, - plant_id: int, - ) -> None: - """Initialize the probe.""" - - self.api = api - self.plant_id = plant_id - self.data: Plant = None - self.previous_values: dict = {} - - @Throttle(SCAN_INTERVAL) - def update(self) -> None: - """Update probe data.""" - _LOGGER.debug("Updating data for plant %s", self.plant_id) - try: - self.data = self.api.plant(self.plant_id) - for inverter in self.data.inverters: - self.api.complete_inverter(inverter) - except json.decoder.JSONDecodeError: - _LOGGER.error("Unable to fetch data from SunWEG server") - _LOGGER.debug("Finished updating data for plant %s", self.plant_id) - - def get_api_value( - self, - variable: str, - device_type: DeviceType, - inverter_id: int = 0, - deep_name: str | None = None, - ): - """Retrieve from a Plant the desired variable value.""" - if device_type == DeviceType.TOTAL: - return self.data.__dict__.get(variable) - - inverter_list = [i for i in self.data.inverters if i.id == inverter_id] - if len(inverter_list) == 0: - return None - inverter = inverter_list[0] - - if device_type == DeviceType.INVERTER: - return inverter.__dict__.get(variable) - if device_type == DeviceType.PHASE: - for phase in inverter.phases: - if phase.name == deep_name: - return phase.__dict__.get(variable) - elif device_type == DeviceType.STRING: - for mppt in inverter.mppts: - for string in mppt.strings: - if string.name == deep_name: - return string.__dict__.get(variable) - return None - - def get_data( - self, - *, - api_variable_key: str, - api_variable_unit: str | None, - deep_name: str | None, - device_type: DeviceType, - inverter_id: int, - name: str | UndefinedType | None, - native_unit_of_measurement: str | None, - never_resets: bool, - previous_value_drop_threshold: float | None, - ) -> tuple[StateType | datetime.datetime, str | None]: - """Get the data.""" - _LOGGER.debug( - "Data request for: %s", - name, - ) - variable = api_variable_key - 
previous_unit = native_unit_of_measurement - api_value = self.get_api_value(variable, device_type, inverter_id, deep_name) - previous_value = self.previous_values.get(variable) - return_value = api_value - if api_variable_unit is not None: - native_unit_of_measurement = self.get_api_value( - api_variable_unit, - device_type, - inverter_id, - deep_name, - ) - - # If we have a 'drop threshold' specified, then check it and correct if needed - if ( - previous_value_drop_threshold is not None - and previous_value is not None - and api_value is not None - and previous_unit == native_unit_of_measurement - ): - _LOGGER.debug( - ( - "%s - Drop threshold specified (%s), checking for drop... API" - " Value: %s, Previous Value: %s" - ), - name, - previous_value_drop_threshold, - api_value, - previous_value, - ) - diff = float(api_value) - float(previous_value) - - # Check if the value has dropped (negative value i.e. < 0) and it has only - # dropped by a small amount, if so, use the previous value. - # Note - The energy dashboard takes care of drops within 10% - # of the current value, however if the value is low e.g. 0.2 - # and drops by 0.1 it classes as a reset. - if -(previous_value_drop_threshold) <= diff < 0: - _LOGGER.debug( - ( - "Diff is negative, but only by a small amount therefore not a" - " nightly reset, using previous value (%s) instead of api value" - " (%s)" - ), - previous_value, - api_value, - ) - return_value = previous_value - else: - _LOGGER.debug("%s - No drop detected, using API value", name) - - # Lifetime total values should always be increasing, they will never reset, - # however the API sometimes returns 0 values when the clock turns to 00:00 - # local time in that scenario we should just return the previous value - # Scenarios: - # 1 - System has a genuine 0 value when it it first commissioned: - # - will return 0 until a non-zero value is registered - # 2 - System has been running fine but temporarily resets to 0 briefly - # at midnight: - # - will return the previous value - # 3 - HA is restarted during the midnight 'outage' - Not handled: - # - Previous value will not exist meaning 0 will be returned - # - This is an edge case that would be better handled by looking - # up the previous value of the entity from the recorder - if never_resets and api_value == 0 and previous_value: - _LOGGER.debug( - ( - "API value is 0, but this value should never reset, returning" - " previous value (%s) instead" - ), - previous_value, - ) - return_value = previous_value - - self.previous_values[variable] = return_value - - return (return_value, native_unit_of_measurement) +async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Remove a config entry.""" + if not hass.config_entries.async_loaded_entries(DOMAIN): + ir.async_delete_issue(hass, DOMAIN, DOMAIN) + # Remove any remaining disabled or ignored entries + for _entry in hass.config_entries.async_entries(DOMAIN): + hass.async_create_task(hass.config_entries.async_remove(_entry.entry_id)) diff --git a/homeassistant/components/sunweg/config_flow.py b/homeassistant/components/sunweg/config_flow.py index 24df8c02f55..42535a9ef58 100644 --- a/homeassistant/components/sunweg/config_flow.py +++ b/homeassistant/components/sunweg/config_flow.py @@ -1,129 +1,11 @@ """Config flow for Sun WEG integration.""" -from collections.abc import Mapping -from typing import Any +from homeassistant.config_entries import ConfigFlow -from sunweg.api import APIHelper, SunWegApiError -import voluptuous as vol - -from 
homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import callback - -from .const import CONF_PLANT_ID, DOMAIN +from . import DOMAIN class SunWEGConfigFlow(ConfigFlow, domain=DOMAIN): """Config flow class.""" VERSION = 1 - - def __init__(self) -> None: - """Initialise sun weg server flow.""" - self.api: APIHelper = None - self.data: dict[str, Any] = {} - - @callback - def _async_show_user_form(self, step_id: str, errors=None) -> ConfigFlowResult: - """Show the form to the user.""" - default_username = "" - if CONF_USERNAME in self.data: - default_username = self.data[CONF_USERNAME] - data_schema = vol.Schema( - { - vol.Required(CONF_USERNAME, default=default_username): str, - vol.Required(CONF_PASSWORD): str, - } - ) - - return self.async_show_form( - step_id=step_id, data_schema=data_schema, errors=errors - ) - - def _set_auth_data( - self, step: str, username: str, password: str - ) -> ConfigFlowResult | None: - """Set username and password.""" - if self.api: - # Set username and password - self.api.username = username - self.api.password = password - else: - # Initialise the library with the username & password - self.api = APIHelper(username, password) - - try: - if not self.api.authenticate(): - return self._async_show_user_form(step, {"base": "invalid_auth"}) - except SunWegApiError: - return self._async_show_user_form(step, {"base": "timeout_connect"}) - - return None - - async def async_step_user(self, user_input=None) -> ConfigFlowResult: - """Handle the start of the config flow.""" - if not user_input: - return self._async_show_user_form("user") - - # Store authentication info - self.data = user_input - - conf_result = await self.hass.async_add_executor_job( - self._set_auth_data, - "user", - user_input[CONF_USERNAME], - user_input[CONF_PASSWORD], - ) - - return await self.async_step_plant() if conf_result is None else conf_result - - async def async_step_plant(self, user_input=None) -> ConfigFlowResult: - """Handle adding a "plant" to Home Assistant.""" - plant_list = await self.hass.async_add_executor_job(self.api.listPlants) - - if len(plant_list) == 0: - return self.async_abort(reason="no_plants") - - plants = {plant.id: plant.name for plant in plant_list} - - if user_input is None and len(plant_list) > 1: - data_schema = vol.Schema({vol.Required(CONF_PLANT_ID): vol.In(plants)}) - - return self.async_show_form(step_id="plant", data_schema=data_schema) - - if user_input is None and len(plant_list) == 1: - user_input = {CONF_PLANT_ID: plant_list[0].id} - - user_input[CONF_NAME] = plants[user_input[CONF_PLANT_ID]] - await self.async_set_unique_id(user_input[CONF_PLANT_ID]) - self._abort_if_unique_id_configured() - self.data.update(user_input) - return self.async_create_entry(title=self.data[CONF_NAME], data=self.data) - - async def async_step_reauth( - self, entry_data: Mapping[str, Any] - ) -> ConfigFlowResult: - """Handle reauthorization request from SunWEG.""" - self.data.update(entry_data) - return await self.async_step_reauth_confirm() - - async def async_step_reauth_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle reauthorization flow.""" - if user_input is None: - return self._async_show_user_form("reauth_confirm") - - self.data.update(user_input) - conf_result = await self.hass.async_add_executor_job( - self._set_auth_data, - "reauth_confirm", - user_input[CONF_USERNAME], - user_input[CONF_PASSWORD], - ) - if 
conf_result is not None: - return conf_result - - return self.async_update_reload_and_abort( - self._get_reauth_entry(), data=self.data - ) diff --git a/homeassistant/components/sunweg/const.py b/homeassistant/components/sunweg/const.py deleted file mode 100644 index 11d24352962..00000000000 --- a/homeassistant/components/sunweg/const.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Define constants for the Sun WEG component.""" - -from enum import Enum - -from homeassistant.const import Platform - - -class DeviceType(Enum): - """Device Type Enum.""" - - TOTAL = 1 - INVERTER = 2 - PHASE = 3 - STRING = 4 - - -CONF_PLANT_ID = "plant_id" - -DEFAULT_PLANT_ID = 0 - -DEFAULT_NAME = "Sun WEG" - -DOMAIN = "sunweg" - -PLATFORMS = [Platform.SENSOR] diff --git a/homeassistant/components/sunweg/manifest.json b/homeassistant/components/sunweg/manifest.json index 3ebe9ef8cb4..3e5c669f37f 100644 --- a/homeassistant/components/sunweg/manifest.json +++ b/homeassistant/components/sunweg/manifest.json @@ -1,10 +1,10 @@ { "domain": "sunweg", "name": "Sun WEG", - "codeowners": ["@rokam"], + "codeowners": [], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/sunweg", "iot_class": "cloud_polling", - "loggers": ["sunweg"], - "requirements": ["sunweg==3.0.2"] + "loggers": [], + "requirements": [] } diff --git a/homeassistant/components/sunweg/sensor/__init__.py b/homeassistant/components/sunweg/sensor/__init__.py deleted file mode 100644 index f71d992bea9..00000000000 --- a/homeassistant/components/sunweg/sensor/__init__.py +++ /dev/null @@ -1,178 +0,0 @@ -"""Read status of SunWEG inverters.""" - -from __future__ import annotations - -import logging -from types import MappingProxyType -from typing import Any - -from sunweg.api import APIHelper -from sunweg.device import Inverter -from sunweg.plant import Plant - -from homeassistant.components.sensor import SensorEntity -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_NAME -from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback - -from .. import SunWEGData -from ..const import CONF_PLANT_ID, DEFAULT_PLANT_ID, DOMAIN, DeviceType -from .inverter import INVERTER_SENSOR_TYPES -from .phase import PHASE_SENSOR_TYPES -from .sensor_entity_description import SunWEGSensorEntityDescription -from .string import STRING_SENSOR_TYPES -from .total import TOTAL_SENSOR_TYPES - -_LOGGER = logging.getLogger(__name__) - - -def get_device_list( - api: APIHelper, config: MappingProxyType[str, Any] -) -> tuple[list[Inverter], int]: - """Retrieve the device list for the selected plant.""" - plant_id = int(config[CONF_PLANT_ID]) - - if plant_id == DEFAULT_PLANT_ID: - plant_info: list[Plant] = api.listPlants() - plant_id = plant_info[0].id - - devices: list[Inverter] = [] - # Get a list of devices for specified plant to add sensors for. 
- for inverter in api.plant(plant_id).inverters: - api.complete_inverter(inverter) - devices.append(inverter) - return (devices, plant_id) - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddConfigEntryEntitiesCallback, -) -> None: - """Set up the SunWEG sensor.""" - name = config_entry.data[CONF_NAME] - - probe: SunWEGData = hass.data[DOMAIN][config_entry.entry_id] - - devices, plant_id = await hass.async_add_executor_job( - get_device_list, probe.api, config_entry.data - ) - - entities = [ - SunWEGInverter( - probe, - name=f"{name} Total", - unique_id=f"{plant_id}-{description.key}", - description=description, - device_type=DeviceType.TOTAL, - ) - for description in TOTAL_SENSOR_TYPES - ] - - # Add sensors for each device in the specified plant. - entities.extend( - [ - SunWEGInverter( - probe, - name=f"{device.name}", - unique_id=f"{device.sn}-{description.key}", - description=description, - device_type=DeviceType.INVERTER, - inverter_id=device.id, - ) - for device in devices - for description in INVERTER_SENSOR_TYPES - ] - ) - - entities.extend( - [ - SunWEGInverter( - probe, - name=f"{device.name} {phase.name}", - unique_id=f"{device.sn}-{phase.name}-{description.key}", - description=description, - inverter_id=device.id, - device_type=DeviceType.PHASE, - deep_name=phase.name, - ) - for device in devices - for phase in device.phases - for description in PHASE_SENSOR_TYPES - ] - ) - - entities.extend( - [ - SunWEGInverter( - probe, - name=f"{device.name} {string.name}", - unique_id=f"{device.sn}-{string.name}-{description.key}", - description=description, - inverter_id=device.id, - device_type=DeviceType.STRING, - deep_name=string.name, - ) - for device in devices - for mppt in device.mppts - for string in mppt.strings - for description in STRING_SENSOR_TYPES - ] - ) - - async_add_entities(entities, True) - - -class SunWEGInverter(SensorEntity): - """Representation of a SunWEG Sensor.""" - - entity_description: SunWEGSensorEntityDescription - - def __init__( - self, - probe: SunWEGData, - name: str, - unique_id: str, - description: SunWEGSensorEntityDescription, - device_type: DeviceType, - inverter_id: int = 0, - deep_name: str | None = None, - ) -> None: - """Initialize a sensor.""" - self.probe = probe - self.entity_description = description - self.device_type = device_type - self.inverter_id = inverter_id - self.deep_name = deep_name - - self._attr_name = f"{name} {description.name}" - self._attr_unique_id = unique_id - self._attr_icon = ( - description.icon if description.icon is not None else "mdi:solar-power" - ) - - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, str(probe.plant_id))}, - manufacturer="SunWEG", - name=name, - ) - - def update(self) -> None: - """Get the latest data from the Sun WEG API and updates the state.""" - self.probe.update() - ( - self._attr_native_value, - self._attr_native_unit_of_measurement, - ) = self.probe.get_data( - api_variable_key=self.entity_description.api_variable_key, - api_variable_unit=self.entity_description.api_variable_unit, - deep_name=self.deep_name, - device_type=self.device_type, - inverter_id=self.inverter_id, - name=self.entity_description.name, - native_unit_of_measurement=self.native_unit_of_measurement, - never_resets=self.entity_description.never_resets, - previous_value_drop_threshold=self.entity_description.previous_value_drop_threshold, - ) diff --git a/homeassistant/components/sunweg/sensor/inverter.py 
b/homeassistant/components/sunweg/sensor/inverter.py deleted file mode 100644 index 1010488b38a..00000000000 --- a/homeassistant/components/sunweg/sensor/inverter.py +++ /dev/null @@ -1,70 +0,0 @@ -"""SunWEG Sensor definitions for the Inverter type.""" - -from __future__ import annotations - -from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass -from homeassistant.const import ( - UnitOfEnergy, - UnitOfFrequency, - UnitOfPower, - UnitOfTemperature, -) - -from .sensor_entity_description import SunWEGSensorEntityDescription - -INVERTER_SENSOR_TYPES: tuple[SunWEGSensorEntityDescription, ...] = ( - SunWEGSensorEntityDescription( - key="inverter_energy_today", - name="Energy today", - api_variable_key="_today_energy", - api_variable_unit="_today_energy_metric", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - state_class=SensorStateClass.TOTAL_INCREASING, - suggested_display_precision=1, - ), - SunWEGSensorEntityDescription( - key="inverter_energy_total", - name="Lifetime energy output", - api_variable_key="_total_energy", - api_variable_unit="_total_energy_metric", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - suggested_display_precision=1, - state_class=SensorStateClass.TOTAL, - never_resets=True, - ), - SunWEGSensorEntityDescription( - key="inverter_frequency", - name="AC frequency", - api_variable_key="_frequency", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - suggested_display_precision=1, - ), - SunWEGSensorEntityDescription( - key="inverter_current_wattage", - name="Output power", - api_variable_key="_power", - api_variable_unit="_power_metric", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=1, - ), - SunWEGSensorEntityDescription( - key="inverter_temperature", - name="Temperature", - api_variable_key="_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - icon="mdi:temperature-celsius", - suggested_display_precision=1, - ), - SunWEGSensorEntityDescription( - key="inverter_power_factor", - name="Power Factor", - api_variable_key="_power_factor", - suggested_display_precision=1, - ), -) diff --git a/homeassistant/components/sunweg/sensor/phase.py b/homeassistant/components/sunweg/sensor/phase.py deleted file mode 100644 index d9db6c7c714..00000000000 --- a/homeassistant/components/sunweg/sensor/phase.py +++ /dev/null @@ -1,27 +0,0 @@ -"""SunWEG Sensor definitions for the Phase type.""" - -from __future__ import annotations - -from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.const import UnitOfElectricCurrent, UnitOfElectricPotential - -from .sensor_entity_description import SunWEGSensorEntityDescription - -PHASE_SENSOR_TYPES: tuple[SunWEGSensorEntityDescription, ...] 
= ( - SunWEGSensorEntityDescription( - key="voltage", - name="Voltage", - api_variable_key="_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - suggested_display_precision=2, - ), - SunWEGSensorEntityDescription( - key="amperage", - name="Amperage", - api_variable_key="_amperage", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - suggested_display_precision=1, - ), -) diff --git a/homeassistant/components/sunweg/sensor/sensor_entity_description.py b/homeassistant/components/sunweg/sensor/sensor_entity_description.py deleted file mode 100644 index 8c792ab617f..00000000000 --- a/homeassistant/components/sunweg/sensor/sensor_entity_description.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Sensor Entity Description for the SunWEG integration.""" - -from __future__ import annotations - -from dataclasses import dataclass - -from homeassistant.components.sensor import SensorEntityDescription - - -@dataclass(frozen=True) -class SunWEGRequiredKeysMixin: - """Mixin for required keys.""" - - api_variable_key: str - - -@dataclass(frozen=True) -class SunWEGSensorEntityDescription(SensorEntityDescription, SunWEGRequiredKeysMixin): - """Describes SunWEG sensor entity.""" - - api_variable_unit: str | None = None - previous_value_drop_threshold: float | None = None - never_resets: bool = False - icon: str | None = None diff --git a/homeassistant/components/sunweg/sensor/string.py b/homeassistant/components/sunweg/sensor/string.py deleted file mode 100644 index ec59da5d20d..00000000000 --- a/homeassistant/components/sunweg/sensor/string.py +++ /dev/null @@ -1,27 +0,0 @@ -"""SunWEG Sensor definitions for the String type.""" - -from __future__ import annotations - -from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.const import UnitOfElectricCurrent, UnitOfElectricPotential - -from .sensor_entity_description import SunWEGSensorEntityDescription - -STRING_SENSOR_TYPES: tuple[SunWEGSensorEntityDescription, ...] = ( - SunWEGSensorEntityDescription( - key="voltage", - name="Voltage", - api_variable_key="_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - suggested_display_precision=2, - ), - SunWEGSensorEntityDescription( - key="amperage", - name="Amperage", - api_variable_key="_amperage", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - suggested_display_precision=1, - ), -) diff --git a/homeassistant/components/sunweg/sensor/total.py b/homeassistant/components/sunweg/sensor/total.py deleted file mode 100644 index 2b94446a165..00000000000 --- a/homeassistant/components/sunweg/sensor/total.py +++ /dev/null @@ -1,50 +0,0 @@ -"""SunWEG Sensor definitions for Totals.""" - -from __future__ import annotations - -from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass -from homeassistant.const import UnitOfEnergy, UnitOfPower - -from .sensor_entity_description import SunWEGSensorEntityDescription - -TOTAL_SENSOR_TYPES: tuple[SunWEGSensorEntityDescription, ...] 
= ( - SunWEGSensorEntityDescription( - key="total_money_total", - name="Money lifetime", - api_variable_key="_saving", - icon="mdi:cash", - native_unit_of_measurement="R$", - suggested_display_precision=2, - ), - SunWEGSensorEntityDescription( - key="total_energy_today", - name="Energy Today", - api_variable_key="_today_energy", - api_variable_unit="_today_energy_metric", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - state_class=SensorStateClass.TOTAL_INCREASING, - ), - SunWEGSensorEntityDescription( - key="total_output_power", - name="Output Power", - api_variable_key="_total_power", - native_unit_of_measurement=UnitOfPower.KILO_WATT, - device_class=SensorDeviceClass.POWER, - ), - SunWEGSensorEntityDescription( - key="total_energy_output", - name="Lifetime energy output", - api_variable_key="_total_energy", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - state_class=SensorStateClass.TOTAL, - never_resets=True, - ), - SunWEGSensorEntityDescription( - key="last_update", - name="Last Update", - api_variable_key="_last_update", - device_class=SensorDeviceClass.DATE, - ), -) diff --git a/homeassistant/components/sunweg/strings.json b/homeassistant/components/sunweg/strings.json index 9ab7be053b1..75abf5d9271 100644 --- a/homeassistant/components/sunweg/strings.json +++ b/homeassistant/components/sunweg/strings.json @@ -1,35 +1,8 @@ { - "config": { - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "no_plants": "No plants have been found on this account", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" - }, - "error": { - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]" - }, - "step": { - "plant": { - "data": { - "plant_id": "Plant" - }, - "title": "Select your plant" - }, - "user": { - "data": { - "password": "[%key:common::config_flow::data::password%]", - "username": "[%key:common::config_flow::data::username%]" - }, - "title": "Enter your Sun WEG information" - }, - "reauth_confirm": { - "data": { - "password": "[%key:common::config_flow::data::password%]", - "username": "[%key:common::config_flow::data::username%]" - }, - "title": "[%key:common::config_flow::title::reauth%]" - } + "issues": { + "integration_removed": { + "title": "The SunWEG integration has been removed", + "description": "The SunWEG integration has been removed from Home Assistant.\n\nThe library that Home Assistant uses to connect with SunWEG services [no longer works as expected, as it requires a daily token renewal]({issue}).\n\nTo resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing SunWEG integration entries]({entries})."
} } } diff --git a/homeassistant/components/swiss_public_transport/config_flow.py b/homeassistant/components/swiss_public_transport/config_flow.py index 4dc6efc2e85..872044097d6 100644 --- a/homeassistant/components/swiss_public_transport/config_flow.py +++ b/homeassistant/components/swiss_public_transport/config_flow.py @@ -190,7 +190,7 @@ class SwissPublicTransportConfigFlow(ConfigFlow, domain=DOMAIN): return "cannot_connect" except OpendataTransportError: return "bad_config" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unknown error") return "unknown" return None diff --git a/homeassistant/components/switchbot/light.py b/homeassistant/components/switchbot/light.py index 0a2c342ecf0..4b9a7e1b988 100644 --- a/homeassistant/components/switchbot/light.py +++ b/homeassistant/components/switchbot/light.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any +from typing import Any, cast from switchbot import ColorMode as SwitchBotColorMode, SwitchbotBaseLight @@ -68,7 +68,9 @@ class SwitchbotLightEntity(SwitchbotEntity, LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" - brightness = round(kwargs.get(ATTR_BRIGHTNESS, self.brightness) / 255 * 100) + brightness = round( + cast(int, kwargs.get(ATTR_BRIGHTNESS, self.brightness)) / 255 * 100 + ) if ( self.supported_color_modes diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index 85d5bcf6436..d9f6f98d1fd 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.57.1"] + "requirements": ["PySwitchbot==0.58.0"] } diff --git a/homeassistant/components/switchbot/sensor.py b/homeassistant/components/switchbot/sensor.py index 9be5ad8be5a..d68c913db15 100644 --- a/homeassistant/components/switchbot/sensor.py +++ b/homeassistant/components/switchbot/sensor.py @@ -11,6 +11,7 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, + LIGHT_LUX, PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, @@ -71,6 +72,12 @@ SENSOR_TYPES: dict[str, SensorEntityDescription] = { state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.HUMIDITY, ), + "illuminance": SensorEntityDescription( + key="illuminance", + native_unit_of_measurement=LIGHT_LUX, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.ILLUMINANCE, + ), "temperature": SensorEntityDescription( key="temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, diff --git a/homeassistant/components/synology_dsm/__init__.py b/homeassistant/components/synology_dsm/__init__.py index 70c7e76a53a..d9319beb595 100644 --- a/homeassistant/components/synology_dsm/__init__.py +++ b/homeassistant/components/synology_dsm/__init__.py @@ -123,6 +123,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SynologyDSMConfigEntry) entry.runtime_data = SynologyDSMData( api=api, coordinator_central=coordinator_central, + coordinator_central_old_update_success=True, coordinator_cameras=coordinator_cameras, coordinator_switches=coordinator_switches, ) @@ -139,6 +140,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: SynologyDSMConfigEntry) 
entry.async_on_state_change(async_notify_backup_listeners) ) + def async_check_last_update_success() -> None: + if ( + last := coordinator_central.last_update_success + ) is not entry.runtime_data.coordinator_central_old_update_success: + entry.runtime_data.coordinator_central_old_update_success = last + async_notify_backup_listeners() + + entry.runtime_data.coordinator_central.async_add_listener( + async_check_last_update_success + ) + return True diff --git a/homeassistant/components/synology_dsm/backup.py b/homeassistant/components/synology_dsm/backup.py index 11f4287dea2..46e47ebde16 100644 --- a/homeassistant/components/synology_dsm/backup.py +++ b/homeassistant/components/synology_dsm/backup.py @@ -58,6 +58,7 @@ async def async_get_backup_agents( if entry.unique_id is not None and entry.runtime_data.api.file_station and entry.options.get(CONF_BACKUP_PATH) + and entry.runtime_data.coordinator_central.last_update_success ] diff --git a/homeassistant/components/synology_dsm/coordinator.py b/homeassistant/components/synology_dsm/coordinator.py index a35432f0774..dd97dedf65e 100644 --- a/homeassistant/components/synology_dsm/coordinator.py +++ b/homeassistant/components/synology_dsm/coordinator.py @@ -35,6 +35,7 @@ class SynologyDSMData: api: SynoApi coordinator_central: SynologyDSMCentralUpdateCoordinator + coordinator_central_old_update_success: bool coordinator_cameras: SynologyDSMCameraUpdateCoordinator | None coordinator_switches: SynologyDSMSwitchUpdateCoordinator | None diff --git a/homeassistant/components/system_bridge/__init__.py b/homeassistant/components/system_bridge/__init__.py index 3bda29867cc..e1ee57e42b2 100644 --- a/homeassistant/components/system_bridge/__init__.py +++ b/homeassistant/components/system_bridge/__init__.py @@ -11,6 +11,7 @@ from systembridgeconnector.exceptions import ( AuthenticationException, ConnectionClosedException, ConnectionErrorException, + DataMissingException, ) from systembridgeconnector.version import Version from systembridgemodels.keyboard_key import KeyboardKey @@ -184,7 +185,7 @@ async def async_setup_entry( "host": entry.data[CONF_HOST], }, ) from exception - except TimeoutError as exception: + except (DataMissingException, TimeoutError) as exception: raise ConfigEntryNotReady( translation_domain=DOMAIN, translation_key="timeout", diff --git a/homeassistant/components/system_bridge/const.py b/homeassistant/components/system_bridge/const.py index 32507f6d84e..235d7e6b986 100644 --- a/homeassistant/components/system_bridge/const.py +++ b/homeassistant/components/system_bridge/const.py @@ -18,4 +18,6 @@ MODULES: Final[list[Module]] = [ Module.SYSTEM, ] -DATA_WAIT_TIMEOUT: Final[int] = 10 +DATA_WAIT_TIMEOUT: Final[int] = 20 + +GET_DATA_WAIT_TIMEOUT: Final[int] = 15 diff --git a/homeassistant/components/system_bridge/coordinator.py b/homeassistant/components/system_bridge/coordinator.py index 1690bad4a4d..7e545f39e46 100644 --- a/homeassistant/components/system_bridge/coordinator.py +++ b/homeassistant/components/system_bridge/coordinator.py @@ -33,7 +33,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import DOMAIN, MODULES +from .const import DOMAIN, GET_DATA_WAIT_TIMEOUT, MODULES from .data import SystemBridgeData @@ -119,7 +119,10 @@ class SystemBridgeDataUpdateCoordinator(DataUpdateCoordinator[SystemBridgeData]) """Get data from WebSocket.""" await 
self.check_websocket_connected() - modules_data = await self.websocket_client.get_data(GetData(modules=modules)) + modules_data = await self.websocket_client.get_data( + GetData(modules=modules), + timeout=GET_DATA_WAIT_TIMEOUT, + ) # Merge new data with existing data for module in MODULES: diff --git a/homeassistant/components/system_bridge/sensor.py b/homeassistant/components/system_bridge/sensor.py index c7cae2f347b..d9226e7de6e 100644 --- a/homeassistant/components/system_bridge/sensor.py +++ b/homeassistant/components/system_bridge/sensor.py @@ -251,6 +251,7 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = ( state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfFrequency.GIGAHERTZ, device_class=SensorDeviceClass.FREQUENCY, + suggested_display_precision=2, icon="mdi:speedometer", value=cpu_speed, ), @@ -261,6 +262,7 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = ( device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=2, value=lambda data: data.cpu.temperature, ), SystemBridgeSensorEntityDescription( @@ -270,6 +272,7 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = ( device_class=SensorDeviceClass.VOLTAGE, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfElectricPotential.VOLT, + suggested_display_precision=2, value=lambda data: data.cpu.voltage, ), SystemBridgeSensorEntityDescription( @@ -284,6 +287,7 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = ( state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfInformation.GIGABYTES, device_class=SensorDeviceClass.DATA_SIZE, + suggested_display_precision=2, icon="mdi:memory", value=memory_free, ), @@ -291,6 +295,7 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = ( key="memory_used_percentage", state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=2, icon="mdi:memory", value=lambda data: data.memory.virtual.percent, ), @@ -301,6 +306,7 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = ( state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfInformation.GIGABYTES, device_class=SensorDeviceClass.DATA_SIZE, + suggested_display_precision=2, icon="mdi:memory", value=memory_used, ), @@ -322,6 +328,7 @@ BASE_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] = ( translation_key="load", state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=1, icon="mdi:percent", value=lambda data: data.cpu.usage, ), @@ -345,6 +352,7 @@ BATTERY_SENSOR_TYPES: tuple[SystemBridgeSensorEntityDescription, ...] 
= ( device_class=SensorDeviceClass.BATTERY, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=2, value=lambda data: data.battery.percentage, ), SystemBridgeSensorEntityDescription( @@ -381,6 +389,7 @@ async def async_setup_entry( name=f"{partition.mount_point} space used", state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=2, icon="mdi:harddisk", value=( lambda data, @@ -457,6 +466,7 @@ async def async_setup_entry( state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfFrequency.HERTZ, device_class=SensorDeviceClass.FREQUENCY, + suggested_display_precision=0, icon="mdi:monitor", value=lambda data, k=index: display_refresh_rate(data, k), ), @@ -476,6 +486,7 @@ async def async_setup_entry( state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfFrequency.MEGAHERTZ, device_class=SensorDeviceClass.FREQUENCY, + suggested_display_precision=0, icon="mdi:speedometer", value=lambda data, k=index: gpu_core_clock_speed(data, k), ), @@ -490,6 +501,7 @@ async def async_setup_entry( state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfFrequency.MEGAHERTZ, device_class=SensorDeviceClass.FREQUENCY, + suggested_display_precision=0, icon="mdi:speedometer", value=lambda data, k=index: gpu_memory_clock_speed(data, k), ), @@ -503,6 +515,7 @@ async def async_setup_entry( state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfInformation.MEGABYTES, device_class=SensorDeviceClass.DATA_SIZE, + suggested_display_precision=0, icon="mdi:memory", value=lambda data, k=index: gpu_memory_free(data, k), ), @@ -515,6 +528,7 @@ async def async_setup_entry( name=f"{gpu.name} memory used %", state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=2, icon="mdi:memory", value=lambda data, k=index: gpu_memory_used_percentage(data, k), ), @@ -529,6 +543,7 @@ async def async_setup_entry( state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfInformation.MEGABYTES, device_class=SensorDeviceClass.DATA_SIZE, + suggested_display_precision=0, icon="mdi:memory", value=lambda data, k=index: gpu_memory_used(data, k), ), @@ -569,6 +584,7 @@ async def async_setup_entry( device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=2, value=lambda data, k=index: gpu_temperature(data, k), ), entry.data[CONF_PORT], @@ -580,6 +596,7 @@ async def async_setup_entry( name=f"{gpu.name} usage %", state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=2, icon="mdi:percent", value=lambda data, k=index: gpu_usage_percentage(data, k), ), @@ -601,6 +618,7 @@ async def async_setup_entry( state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=PERCENTAGE, icon="mdi:percent", + suggested_display_precision=2, value=lambda data, k=cpu.id: cpu_usage_per_cpu(data, k), ), entry.data[CONF_PORT], @@ -614,6 +632,7 @@ async def async_setup_entry( native_unit_of_measurement=UnitOfPower.WATT, state_class=SensorStateClass.MEASUREMENT, icon="mdi:chip", + suggested_display_precision=2, value=lambda data, k=cpu.id: cpu_power_per_cpu(data, k), ), entry.data[CONF_PORT], diff --git a/homeassistant/components/systemmonitor/manifest.json b/homeassistant/components/systemmonitor/manifest.json index bd16464b290..9302746aa17 100644 
--- a/homeassistant/components/systemmonitor/manifest.json +++ b/homeassistant/components/systemmonitor/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/systemmonitor", "iot_class": "local_push", "loggers": ["psutil"], - "requirements": ["psutil-home-assistant==0.0.1", "psutil==6.1.1"], + "requirements": ["psutil-home-assistant==0.0.1", "psutil==7.0.0"], "single_config_entry": true } diff --git a/homeassistant/components/tado/__init__.py b/homeassistant/components/tado/__init__.py index 4b0203acda3..d1994075f12 100644 --- a/homeassistant/components/tado/__init__.py +++ b/homeassistant/components/tado/__init__.py @@ -10,12 +10,17 @@ from PyTado.interface import Tado from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.exceptions import ( + ConfigEntryAuthFailed, + ConfigEntryError, + ConfigEntryNotReady, +) from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType from .const import ( CONF_FALLBACK, + CONF_REFRESH_TOKEN, CONST_OVERLAY_MANUAL, CONST_OVERLAY_TADO_DEFAULT, CONST_OVERLAY_TADO_MODE, @@ -56,23 +61,34 @@ type TadoConfigEntry = ConfigEntry[TadoData] async def async_setup_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool: """Set up Tado from a config entry.""" + if CONF_REFRESH_TOKEN not in entry.data: + raise ConfigEntryAuthFailed _async_import_options_from_data_if_missing(hass, entry) _LOGGER.debug("Setting up Tado connection") + _LOGGER.debug( + "Creating tado instance with refresh token: %s", + entry.data[CONF_REFRESH_TOKEN], + ) + + def create_tado_instance() -> tuple[Tado, str]: + """Create a Tado instance, this time with a previously obtained refresh token.""" + tado = Tado(saved_refresh_token=entry.data[CONF_REFRESH_TOKEN]) + return tado, tado.device_activation_status() + try: - tado = await hass.async_add_executor_job( - Tado, - entry.data[CONF_USERNAME], - entry.data[CONF_PASSWORD], - ) + tado, device_status = await hass.async_add_executor_job(create_tado_instance) except PyTado.exceptions.TadoWrongCredentialsException as err: raise ConfigEntryError(f"Invalid Tado credentials. Error: {err}") from err except PyTado.exceptions.TadoException as err: raise ConfigEntryNotReady(f"Error during Tado setup: {err}") from err - _LOGGER.debug( - "Tado connection established for username: %s", entry.data[CONF_USERNAME] - ) + if device_status != "COMPLETED": + raise ConfigEntryAuthFailed( + f"Device login flow status is {device_status}. Starting re-authentication." + ) + + _LOGGER.debug("Tado connection established") coordinator = TadoDataUpdateCoordinator(hass, entry, tado) await coordinator.async_config_entry_first_refresh() @@ -82,11 +98,23 @@ async def async_setup_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool entry.runtime_data = TadoData(coordinator, mobile_coordinator) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_on_unload(entry.add_update_listener(update_listener)) return True +async def async_migrate_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool: + """Migrate old entry.""" + + if entry.version < 2: + _LOGGER.debug("Migrating Tado entry to version 2. 
Current data: %s", entry.data) + data = dict(entry.data) + data.pop(CONF_USERNAME, None) + data.pop(CONF_PASSWORD, None) + hass.config_entries.async_update_entry(entry=entry, data=data, version=2) + _LOGGER.debug("Migration to version 2 successful") + return True + + @callback def _async_import_options_from_data_if_missing( hass: HomeAssistant, entry: TadoConfigEntry @@ -106,11 +134,6 @@ def _async_import_options_from_data_if_missing( hass.config_entries.async_update_entry(entry, options=options) -async def update_listener(hass: HomeAssistant, entry: TadoConfigEntry): - """Handle options update.""" - await hass.config_entries.async_reload(entry.entry_id) - - async def async_unload_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/tado/climate.py b/homeassistant/components/tado/climate.py index 6a2067ffff1..e6ae623d1fc 100644 --- a/homeassistant/components/tado/climate.py +++ b/homeassistant/components/tado/climate.py @@ -477,11 +477,9 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): @property def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" - # If the target temperature will be None - # if the device is performing an action - # that does not affect the temperature or - # the device is switching states - return self._tado_zone_data.target_temp or self._tado_zone_data.current_temp + if self._current_tado_hvac_mode == CONST_MODE_OFF: + return TADO_DEFAULT_MIN_TEMP + return self._tado_zone_data.target_temp async def set_timer( self, diff --git a/homeassistant/components/tado/config_flow.py b/homeassistant/components/tado/config_flow.py index f251a292800..48c3d30cb2b 100644 --- a/homeassistant/components/tado/config_flow.py +++ b/homeassistant/components/tado/config_flow.py @@ -2,160 +2,176 @@ from __future__ import annotations +import asyncio +from collections.abc import Mapping import logging from typing import Any -import PyTado +from PyTado.exceptions import TadoException +from PyTado.http import DeviceActivationStatus from PyTado.interface import Tado -import requests.exceptions import voluptuous as vol +from yarl import URL from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.service_info.zeroconf import ( - ATTR_PROPERTIES_ID, - ZeroconfServiceInfo, -) +from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo from .const import ( CONF_FALLBACK, + CONF_REFRESH_TOKEN, CONST_OVERLAY_TADO_DEFAULT, CONST_OVERLAY_TADO_OPTIONS, DOMAIN, - UNIQUE_ID, ) _LOGGER = logging.getLogger(__name__) -DATA_SCHEMA = vol.Schema( - { - vol.Required(CONF_USERNAME): str, - vol.Required(CONF_PASSWORD): str, - } -) - - -async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]: - """Validate the user input allows us to connect. - - Data has the keys from DATA_SCHEMA with values provided by the user. 
- """ - - try: - tado = await hass.async_add_executor_job( - Tado, data[CONF_USERNAME], data[CONF_PASSWORD] - ) - tado_me = await hass.async_add_executor_job(tado.get_me) - except KeyError as ex: - raise InvalidAuth from ex - except RuntimeError as ex: - raise CannotConnect from ex - except requests.exceptions.HTTPError as ex: - if ex.response.status_code > 400 and ex.response.status_code < 500: - raise InvalidAuth from ex - raise CannotConnect from ex - - if "homes" not in tado_me or len(tado_me["homes"]) == 0: - raise NoHomes - - home = tado_me["homes"][0] - unique_id = str(home["id"]) - name = home["name"] - - return {"title": name, UNIQUE_ID: unique_id} - class TadoConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Tado.""" - VERSION = 1 + VERSION = 2 + login_task: asyncio.Task | None = None + refresh_token: str | None = None + tado: Tado | None = None + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle reauth on credential failure.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Prepare reauth.""" + if user_input is None: + return self.async_show_form(step_id="reauth_confirm") + + return await self.async_step_user() async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle the initial step.""" - errors = {} - if user_input is not None: + """Handle users reauth credentials.""" + + if self.tado is None: + _LOGGER.debug("Initiating device activation") try: - validated = await validate_input(self.hass, user_input) - except CannotConnect: - errors["base"] = "cannot_connect" - except InvalidAuth: - errors["base"] = "invalid_auth" - except NoHomes: - errors["base"] = "no_homes" - except Exception: - _LOGGER.exception("Unexpected exception") - errors["base"] = "unknown" + self.tado = await self.hass.async_add_executor_job(Tado) + except TadoException: + _LOGGER.exception("Error while initiating Tado") + return self.async_abort(reason="cannot_connect") + assert self.tado is not None + tado_device_url = self.tado.device_verification_url() + user_code = URL(tado_device_url).query["user_code"] - if "base" not in errors: - await self.async_set_unique_id(validated[UNIQUE_ID]) - self._abort_if_unique_id_configured() - return self.async_create_entry( - title=validated["title"], data=user_input - ) + async def _wait_for_login() -> None: + """Wait for the user to login.""" + assert self.tado is not None + _LOGGER.debug("Waiting for device activation") + try: + await self.hass.async_add_executor_job(self.tado.device_activation) + except Exception as ex: + _LOGGER.exception("Error while waiting for device activation") + raise CannotConnect from ex - return self.async_show_form( - step_id="user", data_schema=DATA_SCHEMA, errors=errors + if ( + self.tado.device_activation_status() + is not DeviceActivationStatus.COMPLETED + ): + raise CannotConnect + + _LOGGER.debug("Checking login task") + if self.login_task is None: + _LOGGER.debug("Creating task for device activation") + self.login_task = self.hass.async_create_task(_wait_for_login()) + + if self.login_task.done(): + _LOGGER.debug("Login task is done, checking results") + if self.login_task.exception(): + return self.async_show_progress_done(next_step_id="timeout") + self.refresh_token = await self.hass.async_add_executor_job( + self.tado.get_refresh_token + ) + return 
self.async_show_progress_done(next_step_id="finish_login") + + return self.async_show_progress( + step_id="user", + progress_action="wait_for_device", + description_placeholders={ + "url": tado_device_url, + "code": user_code, + }, + progress_task=self.login_task, ) + async def async_step_finish_login( + self, + user_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Handle the finalization of reauth.""" + _LOGGER.debug("Finalizing reauth") + assert self.tado is not None + tado_me = await self.hass.async_add_executor_job(self.tado.get_me) + + if "homes" not in tado_me or len(tado_me["homes"]) == 0: + return self.async_abort(reason="no_homes") + + home = tado_me["homes"][0] + unique_id = str(home["id"]) + name = home["name"] + + if self.source != SOURCE_REAUTH: + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=name, + data={CONF_REFRESH_TOKEN: self.refresh_token}, + ) + + self._abort_if_unique_id_mismatch(reason="reauth_account_mismatch") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data={CONF_REFRESH_TOKEN: self.refresh_token}, + ) + + async def async_step_timeout( + self, + user_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Handle issues that need transition await from progress step.""" + if user_input is None: + return self.async_show_form( + step_id="timeout", + ) + del self.login_task + return await self.async_step_user() + async def async_step_homekit( self, discovery_info: ZeroconfServiceInfo ) -> ConfigFlowResult: """Handle HomeKit discovery.""" - self._async_abort_entries_match() - properties = { - key.lower(): value for (key, value) in discovery_info.properties.items() - } - await self.async_set_unique_id(properties[ATTR_PROPERTIES_ID]) - self._abort_if_unique_id_configured() - return await self.async_step_user() + await self._async_handle_discovery_without_unique_id() + return await self.async_step_homekit_confirm() - async def async_step_reconfigure( + async def async_step_homekit_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - errors: dict[str, str] = {} - reconfigure_entry = self._get_reconfigure_entry() + """Prepare for Homekit.""" + if user_input is None: + return self.async_show_form(step_id="homekit_confirm") - if user_input is not None: - user_input[CONF_USERNAME] = reconfigure_entry.data[CONF_USERNAME] - try: - await validate_input(self.hass, user_input) - except CannotConnect: - errors["base"] = "cannot_connect" - except PyTado.exceptions.TadoWrongCredentialsException: - errors["base"] = "invalid_auth" - except NoHomes: - errors["base"] = "no_homes" - except Exception: # pylint: disable=broad-except - _LOGGER.exception("Unexpected exception") - errors["base"] = "unknown" - - if not errors: - return self.async_update_reload_and_abort( - reconfigure_entry, data_updates=user_input - ) - - return self.async_show_form( - step_id="reconfigure", - data_schema=vol.Schema( - { - vol.Required(CONF_PASSWORD): str, - } - ), - errors=errors, - description_placeholders={ - CONF_USERNAME: reconfigure_entry.data[CONF_USERNAME] - }, - ) + return await self.async_step_user() @staticmethod @callback @@ -173,8 +189,10 @@ class OptionsFlowHandler(OptionsFlow): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle options flow.""" - if user_input is not None: - return self.async_create_entry(data=user_input) + if user_input: + 
result = self.async_create_entry(data=user_input) + await self.hass.config_entries.async_reload(self.config_entry.entry_id) + return result data_schema = vol.Schema( { @@ -191,11 +209,3 @@ class OptionsFlowHandler(OptionsFlow): class CannotConnect(HomeAssistantError): """Error to indicate we cannot connect.""" - - -class InvalidAuth(HomeAssistantError): - """Error to indicate there is invalid auth.""" - - -class NoHomes(HomeAssistantError): - """Error to indicate the account has no homes.""" diff --git a/homeassistant/components/tado/const.py b/homeassistant/components/tado/const.py index bdc4bff1943..7720ff09110 100644 --- a/homeassistant/components/tado/const.py +++ b/homeassistant/components/tado/const.py @@ -37,6 +37,7 @@ TADO_HVAC_ACTION_TO_HA_HVAC_ACTION = { # Configuration CONF_FALLBACK = "fallback" CONF_HOME_ID = "home_id" +CONF_REFRESH_TOKEN = "refresh_token" DATA = "data" # Weather diff --git a/homeassistant/components/tado/coordinator.py b/homeassistant/components/tado/coordinator.py index 559bc4a16fb..5f3aa1de1e4 100644 --- a/homeassistant/components/tado/coordinator.py +++ b/homeassistant/components/tado/coordinator.py @@ -10,7 +10,6 @@ from PyTado.interface import Tado from requests import RequestException from homeassistant.components.climate import PRESET_AWAY, PRESET_HOME -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -20,6 +19,7 @@ if TYPE_CHECKING: from .const import ( CONF_FALLBACK, + CONF_REFRESH_TOKEN, CONST_OVERLAY_TADO_DEFAULT, DOMAIN, INSIDE_TEMPERATURE_MEASUREMENT, @@ -58,8 +58,7 @@ class TadoDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict]]): update_interval=SCAN_INTERVAL, ) self._tado = tado - self._username = config_entry.data[CONF_USERNAME] - self._password = config_entry.data[CONF_PASSWORD] + self._refresh_token = config_entry.data[CONF_REFRESH_TOKEN] self._fallback = config_entry.options.get( CONF_FALLBACK, CONST_OVERLAY_TADO_DEFAULT ) @@ -108,6 +107,18 @@ class TadoDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict]]): self.data["weather"] = home["weather"] self.data["geofence"] = home["geofence"] + refresh_token = await self.hass.async_add_executor_job( + self._tado.get_refresh_token + ) + + if refresh_token != self._refresh_token: + _LOGGER.debug("New refresh token obtained from Tado: %s", refresh_token) + self._refresh_token = refresh_token + self.hass.config_entries.async_update_entry( + self.config_entry, + data={**self.config_entry.data, CONF_REFRESH_TOKEN: refresh_token}, + ) + return self.data async def _async_update_devices(self) -> dict[str, dict]: diff --git a/homeassistant/components/tado/manifest.json b/homeassistant/components/tado/manifest.json index b83e2695137..75ddbacc585 100644 --- a/homeassistant/components/tado/manifest.json +++ b/homeassistant/components/tado/manifest.json @@ -14,5 +14,5 @@ }, "iot_class": "cloud_polling", "loggers": ["PyTado"], - "requirements": ["python-tado==0.18.6"] + "requirements": ["python-tado==0.18.9"] } diff --git a/homeassistant/components/tado/strings.json b/homeassistant/components/tado/strings.json index ff1afc3c03d..53de3969998 100644 --- a/homeassistant/components/tado/strings.json +++ b/homeassistant/components/tado/strings.json @@ -1,33 +1,28 @@ { "config": { + "progress": { + "wait_for_device": "To authenticate, open the following URL and login at Tado:\n{url}\nIf the code is 
not automatically copied, paste the following code to authorize the integration:\n\n```{code}```\n\n\nThe login attempt will time out after five minutes." + }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "could_not_authenticate": "Could not authenticate with Tado.", + "no_homes": "There are no homes linked to this Tado account.", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "step": { - "user": { - "data": { - "password": "[%key:common::config_flow::data::password%]", - "username": "[%key:common::config_flow::data::username%]" - }, - "title": "Connect to your Tado account" + "reauth_confirm": { + "title": "Authenticate with Tado", + "description": "You need to reauthenticate with Tado. Press `Submit` to start the authentication process." }, - "reconfigure": { - "title": "Reconfigure your Tado", - "description": "Reconfigure the entry for your account: `{username}`.", - "data": { - "password": "[%key:common::config_flow::data::password%]" - }, - "data_description": { - "password": "Enter the (new) password for Tado." - } + "homekit": { + "title": "Authenticate with Tado", + "description": "Your device has been discovered and needs to authenticate with Tado. Press `Submit` to start the authentication process." + }, + "timeout": { + "description": "The authentication process timed out. Please try again." } - }, - "error": { - "unknown": "[%key:common::config_flow::error::unknown%]", - "no_homes": "There are no homes linked to this Tado account.", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "options": { diff --git a/homeassistant/components/tasmota/strings.json b/homeassistant/components/tasmota/strings.json index 22af3304297..13edee55110 100644 --- a/homeassistant/components/tasmota/strings.json +++ b/homeassistant/components/tasmota/strings.json @@ -20,11 +20,11 @@ "issues": { "topic_duplicated": { "title": "Several Tasmota devices are sharing the same topic", - "description": "Several Tasmota devices are sharing the topic {topic}.\n\n Tasmota devices with this problem: {offenders}." + "description": "Several Tasmota devices are sharing the topic {topic}.\n\nTasmota devices with this problem: {offenders}." }, "topic_no_prefix": { "title": "Tasmota device {name} has an invalid MQTT topic", - "description": "Tasmota device {name} with IP {ip} does not include `%prefix%` in its fulltopic.\n\nEntities for this devices are disabled until the configuration has been corrected." + "description": "Tasmota device {name} with IP {ip} does not include `%prefix%` in its FullTopic.\n\nEntities for this device are disabled until the configuration has been corrected." } } } diff --git a/homeassistant/components/tedee/binary_sensor.py b/homeassistant/components/tedee/binary_sensor.py index a01b889ef8f..6570d9c5428 100644 --- a/homeassistant/components/tedee/binary_sensor.py +++ b/homeassistant/components/tedee/binary_sensor.py @@ -41,7 +41,7 @@ ENTITIES: tuple[TedeeBinarySensorEntityDescription, ...] 
= ( TedeeBinarySensorEntityDescription( key="semi_locked", translation_key="semi_locked", - is_on_fn=lambda lock: lock.state == TedeeLockState.HALF_OPEN, + is_on_fn=lambda lock: lock.state is TedeeLockState.HALF_OPEN, entity_category=EntityCategory.DIAGNOSTIC, ), TedeeBinarySensorEntityDescription( @@ -53,7 +53,10 @@ ENTITIES: tuple[TedeeBinarySensorEntityDescription, ...] = ( TedeeBinarySensorEntityDescription( key="uncalibrated", translation_key="uncalibrated", - is_on_fn=lambda lock: lock.state == TedeeLockState.UNCALIBRATED, + is_on_fn=( + lambda lock: lock.state is TedeeLockState.UNCALIBRATED + or lock.state is TedeeLockState.UNKNOWN + ), device_class=BinarySensorDeviceClass.PROBLEM, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, diff --git a/homeassistant/components/template/switch.py b/homeassistant/components/template/switch.py index b76fc28b83c..fb3aeb1e42a 100644 --- a/homeassistant/components/template/switch.py +++ b/homeassistant/components/template/switch.py @@ -120,7 +120,7 @@ def rewrite_legacy_to_modern_conf( return switches -def rewrite_options_to_moder_conf(option_config: dict[str, dict]) -> dict[str, dict]: +def rewrite_options_to_modern_conf(option_config: dict[str, dict]) -> dict[str, dict]: """Rewrite option configuration to modern configuration.""" option_config = {**option_config} @@ -189,7 +189,7 @@ async def async_setup_entry( """Initialize config entry.""" _options = dict(config_entry.options) _options.pop("template_type") - _options = rewrite_options_to_moder_conf(_options) + _options = rewrite_options_to_modern_conf(_options) validated_config = SWITCH_CONFIG_SCHEMA(_options) async_add_entities([SwitchTemplate(hass, validated_config, config_entry.entry_id)]) @@ -199,7 +199,8 @@ def async_create_preview_switch( hass: HomeAssistant, name: str, config: dict[str, Any] ) -> SwitchTemplate: """Create a preview switch.""" - validated_config = SWITCH_CONFIG_SCHEMA(config | {CONF_NAME: name}) + updated_config = rewrite_options_to_modern_conf(config) + validated_config = SWITCH_CONFIG_SCHEMA(updated_config | {CONF_NAME: name}) return SwitchTemplate(hass, validated_config, None) diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index 27bfb9134ab..2642bd2f7d5 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -5,12 +5,7 @@ from typing import Final from aiohttp.client_exceptions import ClientResponseError import jwt -from tesla_fleet_api import ( - EnergySpecific, - TeslaFleetApi, - VehicleSigned, - VehicleSpecific, -) +from tesla_fleet_api import TeslaFleetApi from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import ( InvalidRegion, @@ -128,7 +123,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - vehicles: list[TeslaFleetVehicleData] = [] energysites: list[TeslaFleetEnergyData] = [] for product in products: - if "vin" in product and hasattr(tesla, "vehicle"): + if "vin" in product and Scope.VEHICLE_DEVICE_DATA in scopes: # Remove the protobuff 'cached_data' that we do not use to save memory product.pop("cached_data", None) vin = product["vin"] @@ -136,9 +131,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - if signing: if not tesla.private_key: await tesla.get_private_key(hass.config.path("tesla_fleet.key")) - api = VehicleSigned(tesla.vehicle, vin) + api = tesla.vehicles.createSigned(vin) else: - api = 
VehicleSpecific(tesla.vehicle, vin) + api = tesla.vehicles.createFleet(vin) coordinator = TeslaFleetVehicleDataCoordinator(hass, entry, api, product) await coordinator.async_config_entry_first_refresh() @@ -160,7 +155,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - signing=signing, ) ) - elif "energy_site_id" in product and hasattr(tesla, "energy"): + elif "energy_site_id" in product and Scope.ENERGY_DEVICE_DATA in scopes: site_id = product["energy_site_id"] if not ( product["components"]["battery"] @@ -173,7 +168,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - ) continue - api = EnergySpecific(tesla.energy, site_id) + api = tesla.energySites.create(site_id) live_coordinator = TeslaFleetEnergySiteLiveCoordinator(hass, entry, api) history_coordinator = TeslaFleetEnergySiteHistoryCoordinator( @@ -227,7 +222,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - # Setup Platforms entry.runtime_data = TeslaFleetData(vehicles, energysites, scopes) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True diff --git a/homeassistant/components/tesla_fleet/coordinator.py b/homeassistant/components/tesla_fleet/coordinator.py index 6f881d0feba..50a69258a31 100644 --- a/homeassistant/components/tesla_fleet/coordinator.py +++ b/homeassistant/components/tesla_fleet/coordinator.py @@ -7,7 +7,6 @@ from random import randint from time import time from typing import TYPE_CHECKING, Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import TeslaEnergyPeriod, VehicleDataEndpoint from tesla_fleet_api.exceptions import ( InvalidToken, @@ -17,6 +16,7 @@ from tesla_fleet_api.exceptions import ( TeslaFleetError, VehicleOffline, ) +from tesla_fleet_api.tesla import EnergySite, VehicleFleet from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed @@ -70,7 +70,7 @@ class TeslaFleetVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): self, hass: HomeAssistant, config_entry: TeslaFleetConfigEntry, - api: VehicleSpecific, + api: VehicleFleet, product: dict, ) -> None: """Initialize TeslaFleet Vehicle Update Coordinator.""" @@ -149,7 +149,7 @@ class TeslaFleetEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]) self, hass: HomeAssistant, config_entry: TeslaFleetConfigEntry, - api: EnergySpecific, + api: EnergySite, ) -> None: """Initialize TeslaFleet Energy Site Live coordinator.""" super().__init__( @@ -202,7 +202,7 @@ class TeslaFleetEnergySiteHistoryCoordinator(DataUpdateCoordinator[dict[str, Any self, hass: HomeAssistant, config_entry: TeslaFleetConfigEntry, - api: EnergySpecific, + api: EnergySite, ) -> None: """Initialize Tesla Fleet Energy Site History coordinator.""" super().__init__( @@ -266,7 +266,7 @@ class TeslaFleetEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]) self, hass: HomeAssistant, config_entry: TeslaFleetConfigEntry, - api: EnergySpecific, + api: EnergySite, product: dict, ) -> None: """Initialize TeslaFleet Energy Info coordinator.""" diff --git a/homeassistant/components/tesla_fleet/entity.py b/homeassistant/components/tesla_fleet/entity.py index 0260acf368e..583e92595d0 100644 --- a/homeassistant/components/tesla_fleet/entity.py +++ b/homeassistant/components/tesla_fleet/entity.py @@ -3,8 +3,9 @@ from abc import abstractmethod from typing import Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const 
import Scope +from tesla_fleet_api.tesla.energysite import EnergySite +from tesla_fleet_api.tesla.vehicle.fleet import VehicleFleet from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.device_registry import DeviceInfo @@ -41,7 +42,7 @@ class TeslaFleetEntity( | TeslaFleetEnergySiteLiveCoordinator | TeslaFleetEnergySiteHistoryCoordinator | TeslaFleetEnergySiteInfoCoordinator, - api: VehicleSpecific | EnergySpecific, + api: VehicleFleet | EnergySite, key: str, ) -> None: """Initialize common aspects of a TeslaFleet entity.""" diff --git a/homeassistant/components/tesla_fleet/manifest.json b/homeassistant/components/tesla_fleet/manifest.json index 010197ccbd9..56dc49ad111 100644 --- a/homeassistant/components/tesla_fleet/manifest.json +++ b/homeassistant/components/tesla_fleet/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/tesla_fleet", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], - "requirements": ["tesla-fleet-api==0.9.13"] + "requirements": ["tesla-fleet-api==1.0.16"] } diff --git a/homeassistant/components/tesla_fleet/models.py b/homeassistant/components/tesla_fleet/models.py index 469ebdca914..17a2bf50ed1 100644 --- a/homeassistant/components/tesla_fleet/models.py +++ b/homeassistant/components/tesla_fleet/models.py @@ -5,8 +5,8 @@ from __future__ import annotations import asyncio from dataclasses import dataclass -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.tesla import EnergySite, VehicleFleet from homeassistant.helpers.device_registry import DeviceInfo @@ -31,7 +31,7 @@ class TeslaFleetData: class TeslaFleetVehicleData: """Data for a vehicle in the TeslaFleet integration.""" - api: VehicleSpecific + api: VehicleFleet coordinator: TeslaFleetVehicleDataCoordinator vin: str device: DeviceInfo @@ -43,7 +43,7 @@ class TeslaFleetVehicleData: class TeslaFleetEnergyData: """Data for a vehicle in the TeslaFleet integration.""" - api: EnergySpecific + api: EnergySite live_coordinator: TeslaFleetEnergySiteLiveCoordinator history_coordinator: TeslaFleetEnergySiteHistoryCoordinator info_coordinator: TeslaFleetEnergySiteInfoCoordinator diff --git a/homeassistant/components/tesla_fleet/number.py b/homeassistant/components/tesla_fleet/number.py index a1123ab9553..b4f7e42cafd 100644 --- a/homeassistant/components/tesla_fleet/number.py +++ b/homeassistant/components/tesla_fleet/number.py @@ -7,8 +7,8 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.tesla import EnergySite, VehicleFleet from homeassistant.components.number import ( NumberDeviceClass, @@ -33,7 +33,7 @@ PARALLEL_UPDATES = 0 class TeslaFleetNumberVehicleEntityDescription(NumberEntityDescription): """Describes TeslaFleet Number entity.""" - func: Callable[[VehicleSpecific, float], Awaitable[Any]] + func: Callable[[VehicleFleet, float], Awaitable[Any]] native_min_value: float native_max_value: float min_key: str | None = None @@ -74,7 +74,7 @@ VEHICLE_DESCRIPTIONS: tuple[TeslaFleetNumberVehicleEntityDescription, ...] 
= ( class TeslaFleetNumberBatteryEntityDescription(NumberEntityDescription): """Describes TeslaFleet Number entity.""" - func: Callable[[EnergySpecific, float], Awaitable[Any]] + func: Callable[[EnergySite, float], Awaitable[Any]] requires: str | None = None diff --git a/homeassistant/components/tesla_fleet/strings.json b/homeassistant/components/tesla_fleet/strings.json index 331885893fe..c5a03e183e4 100644 --- a/homeassistant/components/tesla_fleet/strings.json +++ b/homeassistant/components/tesla_fleet/strings.json @@ -209,7 +209,7 @@ "high": "High", "low": "Low", "medium": "Medium", - "off": "Off" + "off": "[%key:common::state::off%]" } }, "climate_state_seat_heater_rear_center": { @@ -218,7 +218,7 @@ "high": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::high%]", "low": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::low%]", "medium": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::medium%]", - "off": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::off%]" + "off": "[%key:common::state::off%]" } }, "climate_state_seat_heater_rear_left": { @@ -227,7 +227,7 @@ "high": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::high%]", "low": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::low%]", "medium": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::medium%]", - "off": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::off%]" + "off": "[%key:common::state::off%]" } }, "climate_state_seat_heater_rear_right": { @@ -236,7 +236,7 @@ "high": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::high%]", "low": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::low%]", "medium": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::medium%]", - "off": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::off%]" + "off": "[%key:common::state::off%]" } }, "climate_state_seat_heater_right": { @@ -245,7 +245,7 @@ "high": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::high%]", "low": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::low%]", "medium": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::medium%]", - "off": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::off%]" + "off": "[%key:common::state::off%]" } }, "climate_state_seat_heater_third_row_left": { @@ -254,7 +254,7 @@ "high": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::high%]", "low": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::low%]", "medium": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::medium%]", - "off": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::off%]" + "off": "[%key:common::state::off%]" } }, "climate_state_seat_heater_third_row_right": { @@ -263,7 +263,7 @@ "high": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::high%]", "low": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::low%]", "medium": 
"[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::medium%]", - "off": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::off%]" + "off": "[%key:common::state::off%]" } }, "climate_state_steering_wheel_heat_level": { @@ -271,7 +271,7 @@ "state": { "high": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::high%]", "low": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::low%]", - "off": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::off%]" + "off": "[%key:common::state::off%]" } }, "components_customer_preferred_export_rule": { @@ -330,8 +330,8 @@ "state": { "starting": "Starting", "charging": "[%key:common::state::charging%]", - "disconnected": "Disconnected", - "stopped": "Stopped", + "disconnected": "[%key:common::state::disconnected%]", + "stopped": "[%key:common::state::stopped%]", "complete": "Complete", "no_power": "No power" } @@ -418,8 +418,8 @@ "name": "Grid Status", "state": { "island_status_unknown": "Unknown", - "on_grid": "Connected", - "off_grid": "Disconnected", + "on_grid": "[%key:common::state::connected%]", + "off_grid": "[%key:common::state::disconnected%]", "off_grid_unintentional": "Disconnected unintentionally", "off_grid_intentional": "Disconnected intentionally" } diff --git a/homeassistant/components/tesla_fleet/switch.py b/homeassistant/components/tesla_fleet/switch.py index 614af8772cc..4c64acfafa6 100644 --- a/homeassistant/components/tesla_fleet/switch.py +++ b/homeassistant/components/tesla_fleet/switch.py @@ -7,7 +7,7 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api.const import Scope, Seat +from tesla_fleet_api.const import AutoSeat, Scope, Seat from homeassistant.components.switch import ( SwitchDeviceClass, @@ -46,7 +46,9 @@ VEHICLE_DESCRIPTIONS: tuple[TeslaFleetSwitchEntityDescription, ...] = ( ), TeslaFleetSwitchEntityDescription( key="climate_state_auto_seat_climate_left", - on_func=lambda api: api.remote_auto_seat_climate_request(Seat.FRONT_LEFT, True), + on_func=lambda api: api.remote_auto_seat_climate_request( + AutoSeat.FRONT_LEFT, True + ), off_func=lambda api: api.remote_auto_seat_climate_request( Seat.FRONT_LEFT, False ), @@ -55,10 +57,10 @@ VEHICLE_DESCRIPTIONS: tuple[TeslaFleetSwitchEntityDescription, ...] 
= ( TeslaFleetSwitchEntityDescription( key="climate_state_auto_seat_climate_right", on_func=lambda api: api.remote_auto_seat_climate_request( - Seat.FRONT_RIGHT, True + AutoSeat.FRONT_RIGHT, True ), off_func=lambda api: api.remote_auto_seat_climate_request( - Seat.FRONT_RIGHT, False + AutoSeat.FRONT_RIGHT, False ), scopes=[Scope.VEHICLE_CMDS], ), diff --git a/homeassistant/components/teslemetry/__init__.py b/homeassistant/components/teslemetry/__init__.py index eef974cc5a7..b820d2d1b43 100644 --- a/homeassistant/components/teslemetry/__init__.py +++ b/homeassistant/components/teslemetry/__init__.py @@ -4,7 +4,6 @@ import asyncio from collections.abc import Callable from typing import Final -from tesla_fleet_api import EnergySpecific, Teslemetry, VehicleSpecific from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import ( Forbidden, @@ -12,6 +11,7 @@ from tesla_fleet_api.exceptions import ( SubscriptionRequired, TeslaFleetError, ) +from tesla_fleet_api.teslemetry import Teslemetry from teslemetry_stream import TeslemetryStream from homeassistant.config_entries import ConfigEntry @@ -111,7 +111,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - # Remove the protobuff 'cached_data' that we do not use to save memory product.pop("cached_data", None) vin = product["vin"] - api = VehicleSpecific(teslemetry.vehicle, vin) + api = teslemetry.vehicles.create(vin) coordinator = TeslemetryVehicleDataCoordinator(hass, entry, api, product) device = DeviceInfo( identifiers={(DOMAIN, vin)}, @@ -156,7 +156,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - ) continue - api = EnergySpecific(teslemetry.energy, site_id) + api = teslemetry.energySites.create(site_id) device = DeviceInfo( identifiers={(DOMAIN, str(site_id))}, manufacturer="Tesla", diff --git a/homeassistant/components/teslemetry/climate.py b/homeassistant/components/teslemetry/climate.py index 86811131ab6..c1c8fcd2f73 100644 --- a/homeassistant/components/teslemetry/climate.py +++ b/homeassistant/components/teslemetry/climate.py @@ -6,9 +6,11 @@ from itertools import chain from typing import Any, cast from tesla_fleet_api.const import CabinOverheatProtectionTemp, Scope +from tesla_fleet_api.teslemetry import Vehicle from homeassistant.components.climate import ( ATTR_HVAC_MODE, + HVAC_MODES, ClimateEntity, ClimateEntityFeature, HVACMode, @@ -22,15 +24,32 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.restore_state import RestoreEntity from . 
import TeslemetryConfigEntry from .const import DOMAIN, TeslemetryClimateSide -from .entity import TeslemetryVehicleEntity +from .entity import ( + TeslemetryRootEntity, + TeslemetryVehicleEntity, + TeslemetryVehicleStreamEntity, +) from .helpers import handle_vehicle_command from .models import TeslemetryVehicleData DEFAULT_MIN_TEMP = 15 DEFAULT_MAX_TEMP = 28 +COP_TEMPERATURES = { + 30: CabinOverheatProtectionTemp.LOW, + 35: CabinOverheatProtectionTemp.MEDIUM, + 40: CabinOverheatProtectionTemp.HIGH, +} +PRESET_MODES = { + "Off": "off", + "On": "keep", + "Dog": "dog", + "Party": "camp", +} + PARALLEL_UPDATES = 0 @@ -45,13 +64,21 @@ async def async_setup_entry( async_add_entities( chain( ( - TeslemetryClimateEntity( + TeslemetryPollingClimateEntity( + vehicle, TeslemetryClimateSide.DRIVER, entry.runtime_data.scopes + ) + if vehicle.api.pre2021 or vehicle.firmware < "2024.44.25" + else TeslemetryStreamingClimateEntity( vehicle, TeslemetryClimateSide.DRIVER, entry.runtime_data.scopes ) for vehicle in entry.runtime_data.vehicles ), ( - TeslemetryCabinOverheatProtectionEntity( + TeslemetryPollingCabinOverheatProtectionEntity( + vehicle, entry.runtime_data.scopes + ) + if vehicle.api.pre2021 or vehicle.firmware < "2024.44.25" + else TeslemetryStreamingCabinOverheatProtectionEntity( vehicle, entry.runtime_data.scopes ) for vehicle in entry.runtime_data.vehicles @@ -60,66 +87,22 @@ async def async_setup_entry( ) -class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): - """Telemetry vehicle climate entity.""" +class TeslemetryClimateEntity(TeslemetryRootEntity, ClimateEntity): + """Vehicle Climate Control.""" + + api: Vehicle _attr_precision = PRECISION_HALVES - _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = [HVACMode.HEAT_COOL, HVACMode.OFF] - _attr_supported_features = ( - ClimateEntityFeature.TURN_ON - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TARGET_TEMPERATURE - | ClimateEntityFeature.PRESET_MODE - ) - _attr_preset_modes = ["off", "keep", "dog", "camp"] - - def __init__( - self, - data: TeslemetryVehicleData, - side: TeslemetryClimateSide, - scopes: Scope, - ) -> None: - """Initialize the climate.""" - self.scoped = Scope.VEHICLE_CMDS in scopes - - if not self.scoped: - self._attr_supported_features = ClimateEntityFeature(0) - self._attr_hvac_modes = [] - - super().__init__( - data, - side, - ) - - def _async_update_attrs(self) -> None: - """Update the attributes of the entity.""" - value = self.get("climate_state_is_climate_on") - if value: - self._attr_hvac_mode = HVACMode.HEAT_COOL - else: - self._attr_hvac_mode = HVACMode.OFF - - # If not scoped, prevent the user from changing the HVAC mode by making it the only option - if self._attr_hvac_mode and not self.scoped: - self._attr_hvac_modes = [self._attr_hvac_mode] - - self._attr_current_temperature = self.get("climate_state_inside_temp") - self._attr_target_temperature = self.get(f"climate_state_{self.key}_setting") - self._attr_preset_mode = self.get("climate_state_climate_keeper_mode") - self._attr_min_temp = cast( - float, self.get("climate_state_min_avail_temp", DEFAULT_MIN_TEMP) - ) - self._attr_max_temp = cast( - float, self.get("climate_state_max_avail_temp", DEFAULT_MAX_TEMP) - ) + _attr_preset_modes = list(PRESET_MODES.values()) + _attr_fan_modes = ["off", "bioweapon"] + _enable_turn_on_off_backwards_compatibility = False async def async_turn_on(self) -> None: """Set the climate state to on.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() + await 
handle_vehicle_command(self.api.auto_conditioning_start()) self._attr_hvac_mode = HVACMode.HEAT_COOL @@ -127,19 +110,21 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_turn_off(self) -> None: """Set the climate state to off.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.auto_conditioning_stop()) self._attr_hvac_mode = HVACMode.OFF self._attr_preset_mode = self._attr_preset_modes[0] + self._attr_fan_mode = self._attr_fan_modes[0] self.async_write_ha_state() async def async_set_temperature(self, **kwargs: Any) -> None: """Set the climate temperature.""" + if temp := kwargs.get(ATTR_TEMPERATURE): - await self.wake_up_if_asleep() + self.raise_for_scope(Scope.VEHICLE_CMDS) + await handle_vehicle_command( self.api.set_temps( driver_temp=temp, @@ -163,18 +148,210 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the climate preset mode.""" - await self.wake_up_if_asleep() + self.raise_for_scope(Scope.VEHICLE_CMDS) + await handle_vehicle_command( self.api.set_climate_keeper_mode( climate_keeper_mode=self._attr_preset_modes.index(preset_mode) ) ) self._attr_preset_mode = preset_mode - if preset_mode != self._attr_preset_modes[0]: - # Changing preset mode will also turn on climate + if preset_mode == self._attr_preset_modes[0]: + self._attr_hvac_mode = HVACMode.OFF + else: self._attr_hvac_mode = HVACMode.HEAT_COOL self.async_write_ha_state() + async def async_set_fan_mode(self, fan_mode: str) -> None: + """Set the Bioweapon defense mode.""" + self.raise_for_scope(Scope.VEHICLE_CMDS) + + await handle_vehicle_command( + self.api.set_bioweapon_mode( + on=(fan_mode != "off"), + manual_override=True, + ) + ) + self._attr_fan_mode = fan_mode + if fan_mode == self._attr_fan_modes[1]: + self._attr_hvac_mode = HVACMode.HEAT_COOL + self.async_write_ha_state() + + +class TeslemetryPollingClimateEntity(TeslemetryClimateEntity, TeslemetryVehicleEntity): + """Polling vehicle climate entity.""" + + _attr_supported_features = ( + ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.PRESET_MODE + | ClimateEntityFeature.FAN_MODE + ) + + def __init__( + self, + data: TeslemetryVehicleData, + side: TeslemetryClimateSide, + scopes: list[Scope], + ) -> None: + """Initialize the climate.""" + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = ClimateEntityFeature(0) + + super().__init__(data, side) + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + value = self.get("climate_state_is_climate_on") + if value is None: + self._attr_hvac_mode = None + if value: + self._attr_hvac_mode = HVACMode.HEAT_COOL + else: + self._attr_hvac_mode = HVACMode.OFF + + self._attr_current_temperature = self.get("climate_state_inside_temp") + self._attr_target_temperature = self.get(f"climate_state_{self.key}_setting") + self._attr_preset_mode = self.get("climate_state_climate_keeper_mode") + if self.get("climate_state_bioweapon_mode"): + self._attr_fan_mode = "bioweapon" + else: + self._attr_fan_mode = "off" + self._attr_min_temp = cast( + float, self.get("climate_state_min_avail_temp", DEFAULT_MIN_TEMP) + ) + self._attr_max_temp = cast( + float, self.get("climate_state_max_avail_temp", DEFAULT_MAX_TEMP) + ) + + +class TeslemetryStreamingClimateEntity( + 
TeslemetryClimateEntity, TeslemetryVehicleStreamEntity, RestoreEntity +): + """Teslemetry steering wheel climate control.""" + + _attr_supported_features = ( + ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.PRESET_MODE + ) + + def __init__( + self, + data: TeslemetryVehicleData, + side: TeslemetryClimateSide, + scopes: list[Scope], + ) -> None: + """Initialize the climate.""" + + # Initialize defaults + self._attr_hvac_mode = None + self._attr_current_temperature = None + self._attr_target_temperature = None + self._attr_fan_mode = None + self._attr_preset_mode = None + + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = ClimateEntityFeature(0) + self.side = side + super().__init__( + data, + side, + ) + + self._attr_min_temp = cast( + float, + data.coordinator.data.get("climate_state_min_avail_temp", DEFAULT_MIN_TEMP), + ) + self._attr_max_temp = cast( + float, + data.coordinator.data.get("climate_state_max_avail_temp", DEFAULT_MAX_TEMP), + ) + self.rhd: bool = data.coordinator.data.get("vehicle_config_rhd", False) + + async def async_added_to_hass(self) -> None: + """Handle entity which will be added.""" + await super().async_added_to_hass() + if (state := await self.async_get_last_state()) is not None: + self._attr_hvac_mode = ( + HVACMode(state.state) if state.state in HVAC_MODES else None + ) + self._attr_current_temperature = state.attributes.get("current_temperature") + self._attr_target_temperature = state.attributes.get("temperature") + self._attr_preset_mode = state.attributes.get("preset_mode") + + self.async_on_remove( + self.vehicle.stream_vehicle.listen_InsideTemp( + self._async_handle_inside_temp + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacACEnabled( + self._async_handle_hvac_ac_enabled + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_ClimateKeeperMode( + self._async_handle_climate_keeper_mode + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_RightHandDrive(self._async_handle_rhd) + ) + + if self.side == TeslemetryClimateSide.DRIVER: + if self.rhd: + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacRightTemperatureRequest( + self._async_handle_hvac_temperature_request + ) + ) + else: + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacLeftTemperatureRequest( + self._async_handle_hvac_temperature_request + ) + ) + elif self.side == TeslemetryClimateSide.PASSENGER: + if self.rhd: + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacLeftTemperatureRequest( + self._async_handle_hvac_temperature_request + ) + ) + else: + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacRightTemperatureRequest( + self._async_handle_hvac_temperature_request + ) + ) + + def _async_handle_inside_temp(self, data: float | None): + self._attr_current_temperature = data + self.async_write_ha_state() + + def _async_handle_hvac_ac_enabled(self, data: bool | None): + self._attr_hvac_mode = ( + None if data is None else HVACMode.HEAT_COOL if data else HVACMode.OFF + ) + self.async_write_ha_state() + + def _async_handle_climate_keeper_mode(self, data: str | None): + self._attr_preset_mode = PRESET_MODES.get(data) if data else None + self.async_write_ha_state() + + def _async_handle_hvac_temperature_request(self, data: float | None): + self._attr_target_temperature = data + self.async_write_ha_state() + + def _async_handle_rhd(self, data: bool | None): + if 
data is not None: + self.rhd = data + COP_MODES = { "Off": HVACMode.OFF, @@ -182,73 +359,27 @@ COP_MODES = { "FanOnly": HVACMode.FAN_ONLY, } -# String to celsius COP_LEVELS = { "Low": 30, "Medium": 35, "High": 40, } -# Celsius to IntEnum -TEMP_LEVELS = { - 30: CabinOverheatProtectionTemp.LOW, - 35: CabinOverheatProtectionTemp.MEDIUM, - 40: CabinOverheatProtectionTemp.HIGH, -} +class TeslemetryCabinOverheatProtectionEntity(TeslemetryRootEntity, ClimateEntity): + """Vehicle Cabin Overheat Protection.""" -class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEntity): - """Telemetry vehicle cabin overheat protection entity.""" + api: Vehicle _attr_precision = PRECISION_WHOLE _attr_target_temperature_step = 5 - _attr_min_temp = COP_LEVELS["Low"] - _attr_max_temp = COP_LEVELS["High"] + _attr_min_temp = 30 + _attr_max_temp = 40 _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = list(COP_MODES.values()) - _attr_entity_registry_enabled_default = False - def __init__( - self, - data: TeslemetryVehicleData, - scopes: Scope, - ) -> None: - """Initialize the climate.""" - - self.scoped = Scope.VEHICLE_CMDS in scopes - if self.scoped: - self._attr_supported_features = ( - ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF - ) - else: - self._attr_supported_features = ClimateEntityFeature(0) - self._attr_hvac_modes = [] - - super().__init__(data, "climate_state_cabin_overheat_protection") - - # Supported Features from data - if self.scoped and self.get("vehicle_config_cop_user_set_temp_supported"): - self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE - - def _async_update_attrs(self) -> None: - """Update the attributes of the entity.""" - - if (state := self.get("climate_state_cabin_overheat_protection")) is None: - self._attr_hvac_mode = None - else: - self._attr_hvac_mode = COP_MODES.get(state) - - # If not scoped, prevent the user from changing the HVAC mode by making it the only option - if self._attr_hvac_mode and not self.scoped: - self._attr_hvac_modes = [self._attr_hvac_mode] - - if (level := self.get("climate_state_cop_activation_temperature")) is None: - self._attr_target_temperature = None - else: - self._attr_target_temperature = COP_LEVELS.get(level) - - self._attr_current_temperature = self.get("climate_state_inside_temp") + _enable_turn_on_off_backwards_compatibility = False async def async_turn_on(self) -> None: """Set the climate state to on.""" @@ -260,26 +391,28 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn async def async_set_temperature(self, **kwargs: Any) -> None: """Set the climate temperature.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - if (temp := kwargs.get(ATTR_TEMPERATURE)) is None or ( - cop_mode := TEMP_LEVELS.get(temp) - ) is None: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="invalid_cop_temp", - ) + if temp := kwargs.get(ATTR_TEMPERATURE): + if (cop_mode := COP_TEMPERATURES.get(temp)) is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_cop_temp", + ) + self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.api.set_cop_temp(cop_mode)) - self._attr_target_temperature = temp + await handle_vehicle_command(self.api.set_cop_temp(cop_mode)) + self._attr_target_temperature = temp if mode := kwargs.get(ATTR_HVAC_MODE): - await self._async_set_cop(mode) + # Set HVAC mode will call write_ha_state + await self.async_set_hvac_mode(mode) 
+ else: + self.async_write_ha_state() - self.async_write_ha_state() + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set the climate mode and state.""" + self.raise_for_scope(Scope.VEHICLE_CMDS) - async def _async_set_cop(self, hvac_mode: HVACMode) -> None: if hvac_mode == HVACMode.OFF: await handle_vehicle_command( self.api.set_cabin_overheat_protection(on=False, fan_only=False) @@ -294,10 +427,125 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn ) self._attr_hvac_mode = hvac_mode - - async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: - """Set the climate mode and state.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await self._async_set_cop(hvac_mode) + self.async_write_ha_state() + + +class TeslemetryPollingCabinOverheatProtectionEntity( + TeslemetryVehicleEntity, TeslemetryCabinOverheatProtectionEntity +): + """Vehicle Cabin Overheat Protection.""" + + def __init__( + self, + data: TeslemetryVehicleData, + scopes: list[Scope], + ) -> None: + """Initialize the climate.""" + + super().__init__( + data, + "climate_state_cabin_overheat_protection", + ) + + # Supported Features + self._attr_supported_features = ( + ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF + ) + if self.get("vehicle_config_cop_user_set_temp_supported"): + self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE + + # Scopes + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = ClimateEntityFeature(0) + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + + if (state := self.get("climate_state_cabin_overheat_protection")) is None: + self._attr_hvac_mode = None + else: + self._attr_hvac_mode = COP_MODES.get(state) + + if (level := self.get("climate_state_cop_activation_temperature")) is None: + self._attr_target_temperature = None + else: + self._attr_target_temperature = COP_LEVELS.get(level) + + self._attr_current_temperature = self.get("climate_state_inside_temp") + + +class TeslemetryStreamingCabinOverheatProtectionEntity( + TeslemetryVehicleStreamEntity, + TeslemetryCabinOverheatProtectionEntity, + RestoreEntity, +): + """Vehicle Cabin Overheat Protection.""" + + def __init__( + self, + data: TeslemetryVehicleData, + scopes: list[Scope], + ) -> None: + """Initialize the climate.""" + + # Initialize defaults + self._attr_hvac_mode = None + self._attr_current_temperature = None + self._attr_target_temperature = None + self._attr_fan_mode = None + self._attr_preset_mode = None + + super().__init__(data, "climate_state_cabin_overheat_protection") + + # Supported Features + self._attr_supported_features = ( + ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF + ) + if data.coordinator.data.get("vehicle_config_cop_user_set_temp_supported"): + self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE + + # Scopes + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = ClimateEntityFeature(0) + + async def async_added_to_hass(self) -> None: + """Handle entity which will be added.""" + await super().async_added_to_hass() + if (state := await self.async_get_last_state()) is not None: + self._attr_hvac_mode = ( + HVACMode(state.state) if state.state in HVAC_MODES else None + ) + self._attr_current_temperature = state.attributes.get("temperature") + self._attr_target_temperature = state.attributes.get("target_temperature") + + 
self.async_on_remove( + self.vehicle.stream_vehicle.listen_InsideTemp( + self._async_handle_inside_temp + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_CabinOverheatProtectionMode( + self._async_handle_protection_mode + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_CabinOverheatProtectionTemperatureLimit( + self._async_handle_temperature_limit + ) + ) + + def _async_handle_inside_temp(self, value: float | None): + self._attr_current_temperature = value + self.async_write_ha_state() + + def _async_handle_protection_mode(self, value: str | None): + self._attr_hvac_mode = COP_MODES.get(value) if value is not None else None + self.async_write_ha_state() + + def _async_handle_temperature_limit(self, value: str | None): + self._attr_target_temperature = ( + COP_LEVELS.get(value) if value is not None else None + ) self.async_write_ha_state() diff --git a/homeassistant/components/teslemetry/config_flow.py b/homeassistant/components/teslemetry/config_flow.py index d8cf2bd7945..a25a98d6c68 100644 --- a/homeassistant/components/teslemetry/config_flow.py +++ b/homeassistant/components/teslemetry/config_flow.py @@ -6,12 +6,12 @@ from collections.abc import Mapping from typing import Any from aiohttp import ClientConnectionError -from tesla_fleet_api import Teslemetry from tesla_fleet_api.exceptions import ( InvalidToken, SubscriptionRequired, TeslaFleetError, ) +from tesla_fleet_api.teslemetry import Teslemetry import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult diff --git a/homeassistant/components/teslemetry/coordinator.py b/homeassistant/components/teslemetry/coordinator.py index f902fb4cc1b..07549008a6c 100644 --- a/homeassistant/components/teslemetry/coordinator.py +++ b/homeassistant/components/teslemetry/coordinator.py @@ -5,13 +5,13 @@ from __future__ import annotations from datetime import datetime, timedelta from typing import TYPE_CHECKING, Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import TeslaEnergyPeriod, VehicleDataEndpoint from tesla_fleet_api.exceptions import ( InvalidToken, SubscriptionRequired, TeslaFleetError, ) +from tesla_fleet_api.teslemetry import EnergySite, Vehicle from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed @@ -49,7 +49,7 @@ class TeslemetryVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): self, hass: HomeAssistant, config_entry: TeslemetryConfigEntry, - api: VehicleSpecific, + api: Vehicle, product: dict, ) -> None: """Initialize Teslemetry Vehicle Update Coordinator.""" @@ -87,7 +87,7 @@ class TeslemetryEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]) self, hass: HomeAssistant, config_entry: TeslemetryConfigEntry, - api: EnergySpecific, + api: EnergySite, data: dict, ) -> None: """Initialize Teslemetry Energy Site Live coordinator.""" @@ -133,7 +133,7 @@ class TeslemetryEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]) self, hass: HomeAssistant, config_entry: TeslemetryConfigEntry, - api: EnergySpecific, + api: EnergySite, product: dict, ) -> None: """Initialize Teslemetry Energy Info coordinator.""" @@ -169,7 +169,7 @@ class TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]): self, hass: HomeAssistant, config_entry: TeslemetryConfigEntry, - api: EnergySpecific, + api: EnergySite, ) -> None: """Initialize Teslemetry Energy Info coordinator.""" super().__init__( diff --git 
a/homeassistant/components/teslemetry/entity.py b/homeassistant/components/teslemetry/entity.py index 82d3db123c3..3d145d24b0c 100644 --- a/homeassistant/components/teslemetry/entity.py +++ b/homeassistant/components/teslemetry/entity.py @@ -4,8 +4,8 @@ from abc import abstractmethod from typing import Any from propcache.api import cached_property -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.teslemetry import EnergySite, Vehicle from teslemetry_stream import Signal from homeassistant.exceptions import ServiceValidationError @@ -29,7 +29,7 @@ class TeslemetryRootEntity(Entity): _attr_has_entity_name = True scoped: bool - api: VehicleSpecific | EnergySpecific + api: Vehicle | EnergySite def raise_for_scope(self, scope: Scope): """Raise an error if a scope is not available.""" @@ -105,7 +105,7 @@ class TeslemetryVehicleEntity(TeslemetryEntity): """Parent class for Teslemetry Vehicle entities.""" _last_update: int = 0 - api: VehicleSpecific + api: Vehicle vehicle: TeslemetryVehicleData def __init__( @@ -134,7 +134,7 @@ class TeslemetryVehicleEntity(TeslemetryEntity): class TeslemetryEnergyLiveEntity(TeslemetryEntity): """Parent class for Teslemetry Energy Site Live entities.""" - api: EnergySpecific + api: EnergySite def __init__( self, @@ -155,7 +155,7 @@ class TeslemetryEnergyLiveEntity(TeslemetryEntity): class TeslemetryEnergyInfoEntity(TeslemetryEntity): """Parent class for Teslemetry Energy Site Info Entities.""" - api: EnergySpecific + api: EnergySite def __init__( self, @@ -194,7 +194,7 @@ class TeslemetryWallConnectorEntity(TeslemetryEntity): """Parent class for Teslemetry Wall Connector Entities.""" _attr_has_entity_name = True - api: EnergySpecific + api: EnergySite def __init__( self, diff --git a/homeassistant/components/teslemetry/manifest.json b/homeassistant/components/teslemetry/manifest.json index 3d37ced8cff..cae5a8f3c01 100644 --- a/homeassistant/components/teslemetry/manifest.json +++ b/homeassistant/components/teslemetry/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/teslemetry", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], - "requirements": ["tesla-fleet-api==0.9.13", "teslemetry-stream==0.6.12"] + "requirements": ["tesla-fleet-api==1.0.16", "teslemetry-stream==0.6.12"] } diff --git a/homeassistant/components/teslemetry/media_player.py b/homeassistant/components/teslemetry/media_player.py index 409b409e325..50f15618e66 100644 --- a/homeassistant/components/teslemetry/media_player.py +++ b/homeassistant/components/teslemetry/media_player.py @@ -2,8 +2,8 @@ from __future__ import annotations -from tesla_fleet_api import VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.teslemetry import Vehicle from homeassistant.components.media_player import ( MediaPlayerDeviceClass, @@ -62,7 +62,7 @@ async def async_setup_entry( class TeslemetryMediaEntity(TeslemetryRootEntity, MediaPlayerEntity): """Base vehicle media player class.""" - api: VehicleSpecific + api: Vehicle _attr_device_class = MediaPlayerDeviceClass.SPEAKER _attr_volume_step = VOLUME_STEP diff --git a/homeassistant/components/teslemetry/models.py b/homeassistant/components/teslemetry/models.py index 5b78386c68a..fd6cf12b5b9 100644 --- a/homeassistant/components/teslemetry/models.py +++ b/homeassistant/components/teslemetry/models.py @@ -6,8 +6,8 @@ import asyncio from collections.abc import Callable from dataclasses import dataclass -from tesla_fleet_api 
import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.teslemetry import EnergySite, Vehicle from teslemetry_stream import TeslemetryStream, TeslemetryStreamVehicle from homeassistant.config_entries import ConfigEntry @@ -34,7 +34,7 @@ class TeslemetryData: class TeslemetryVehicleData: """Data for a vehicle in the Teslemetry integration.""" - api: VehicleSpecific + api: Vehicle config_entry: ConfigEntry coordinator: TeslemetryVehicleDataCoordinator stream: TeslemetryStream @@ -50,7 +50,7 @@ class TeslemetryVehicleData: class TeslemetryEnergyData: """Data for a vehicle in the Teslemetry integration.""" - api: EnergySpecific + api: EnergySite live_coordinator: TeslemetryEnergySiteLiveCoordinator | None info_coordinator: TeslemetryEnergySiteInfoCoordinator history_coordinator: TeslemetryEnergyHistoryCoordinator | None diff --git a/homeassistant/components/teslemetry/number.py b/homeassistant/components/teslemetry/number.py index 10c15a68b09..ff25dec59b8 100644 --- a/homeassistant/components/teslemetry/number.py +++ b/homeassistant/components/teslemetry/number.py @@ -7,8 +7,8 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.teslemetry import EnergySite, Vehicle from teslemetry_stream import TeslemetryStreamVehicle from homeassistant.components.number import ( @@ -46,7 +46,7 @@ PARALLEL_UPDATES = 0 class TeslemetryNumberVehicleEntityDescription(NumberEntityDescription): """Describes Teslemetry Number entity.""" - func: Callable[[VehicleSpecific, int], Awaitable[Any]] + func: Callable[[Vehicle, int], Awaitable[Any]] min_key: str | None = None max_key: str native_min_value: float @@ -99,7 +99,7 @@ VEHICLE_DESCRIPTIONS: tuple[TeslemetryNumberVehicleEntityDescription, ...] 
= ( class TeslemetryNumberBatteryEntityDescription(NumberEntityDescription): """Describes Teslemetry Number entity.""" - func: Callable[[EnergySpecific, float], Awaitable[Any]] + func: Callable[[EnergySite, float], Awaitable[Any]] requires: str | None = None scopes: list[Scope] diff --git a/homeassistant/components/teslemetry/select.py b/homeassistant/components/teslemetry/select.py index 0d268e302de..9e13d15edc4 100644 --- a/homeassistant/components/teslemetry/select.py +++ b/homeassistant/components/teslemetry/select.py @@ -7,8 +7,8 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api import VehicleSpecific from tesla_fleet_api.const import EnergyExportMode, EnergyOperationMode, Scope, Seat +from tesla_fleet_api.teslemetry import Vehicle from teslemetry_stream import TeslemetryStreamVehicle from homeassistant.components.select import SelectEntity, SelectEntityDescription @@ -40,7 +40,7 @@ LEVEL = {OFF: 0, LOW: 1, MEDIUM: 2, HIGH: 3} class TeslemetrySelectEntityDescription(SelectEntityDescription): """Seat Heater entity description.""" - select_fn: Callable[[VehicleSpecific, int], Awaitable[Any]] + select_fn: Callable[[Vehicle, int], Awaitable[Any]] supported_fn: Callable[[dict], bool] = lambda _: True streaming_listener: ( Callable[ diff --git a/homeassistant/components/teslemetry/strings.json b/homeassistant/components/teslemetry/strings.json index c1df7d5aa57..c4013800294 100644 --- a/homeassistant/components/teslemetry/strings.json +++ b/homeassistant/components/teslemetry/strings.json @@ -1,7 +1,7 @@ { "config": { "abort": { - "already_configured": "Account is already configured", + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "reauth_account_mismatch": "The reauthentication account does not match the original account" }, @@ -226,6 +226,12 @@ "dog": "Dog mode", "camp": "Camp mode" } + }, + "fan_mode": { + "state": { + "off": "[%key:common::state::off%]", + "bioweapon": "Bioweapon defense" + } } } } @@ -259,7 +265,7 @@ "high": "High", "low": "Low", "medium": "Medium", - "off": "Off" + "off": "[%key:common::state::off%]" } }, "climate_state_seat_heater_rear_center": { @@ -268,7 +274,7 @@ "high": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::high%]", "low": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::low%]", "medium": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::medium%]", - "off": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::off%]" + "off": "[%key:common::state::off%]" } }, "climate_state_seat_heater_rear_left": { @@ -277,7 +283,7 @@ "high": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::high%]", "low": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::low%]", "medium": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::medium%]", - "off": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::off%]" + "off": "[%key:common::state::off%]" } }, "climate_state_seat_heater_rear_right": { @@ -286,7 +292,7 @@ "high": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::high%]", "low": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::low%]", "medium": 
"[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::medium%]", - "off": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::off%]" + "off": "[%key:common::state::off%]" } }, "climate_state_seat_heater_right": { @@ -295,7 +301,7 @@ "high": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::high%]", "low": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::low%]", "medium": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::medium%]", - "off": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::off%]" + "off": "[%key:common::state::off%]" } }, "climate_state_seat_heater_third_row_left": { @@ -304,7 +310,7 @@ "high": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::high%]", "low": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::low%]", "medium": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::medium%]", - "off": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::off%]" + "off": "[%key:common::state::off%]" } }, "climate_state_seat_heater_third_row_right": { @@ -313,7 +319,7 @@ "high": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::high%]", "low": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::low%]", "medium": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::medium%]", - "off": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::off%]" + "off": "[%key:common::state::off%]" } }, "climate_state_steering_wheel_heat_level": { @@ -321,7 +327,7 @@ "state": { "high": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::high%]", "low": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::low%]", - "off": "[%key:component::teslemetry::entity::select::climate_state_seat_heater_left::state::off%]" + "off": "[%key:common::state::off%]" } }, "components_customer_preferred_export_rule": { @@ -416,8 +422,8 @@ "state": { "starting": "Starting", "charging": "[%key:common::state::charging%]", - "disconnected": "Disconnected", - "stopped": "Stopped", + "disconnected": "[%key:common::state::disconnected%]", + "stopped": "[%key:common::state::stopped%]", "complete": "Complete", "no_power": "No power" } @@ -720,7 +726,7 @@ }, "enable": { "description": "Enable or disable scheduled charging.", - "name": "Enable" + "name": "[%key:common::action::enable%]" }, "time": { "description": "Time to start charging.", @@ -742,7 +748,7 @@ }, "enable": { "description": "Enable or disable scheduled departure.", - "name": "Enable" + "name": "[%key:common::action::enable%]" }, "end_off_peak_time": { "description": "Time to complete charging by.", @@ -776,7 +782,7 @@ }, "enable": { "description": "Enable or disable speed limit.", - "name": "Enable" + "name": "[%key:common::action::enable%]" }, "pin": { "description": "4 digit PIN.", @@ -808,7 +814,7 @@ }, "enable": { "description": "Enable or disable valet mode.", - "name": "Enable" + "name": "[%key:common::action::enable%]" }, "pin": { "description": "4 digit PIN.", diff --git a/homeassistant/components/teslemetry/switch.py b/homeassistant/components/teslemetry/switch.py index 516a6f9852f..645a8398820 100644 --- 
a/homeassistant/components/teslemetry/switch.py +++ b/homeassistant/components/teslemetry/switch.py @@ -7,7 +7,7 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api.const import Scope +from tesla_fleet_api.const import AutoSeat, Scope from teslemetry_stream import TeslemetryStreamVehicle from homeassistant.components.switch import ( @@ -62,15 +62,23 @@ VEHICLE_DESCRIPTIONS: tuple[TeslemetrySwitchEntityDescription, ...] = ( TeslemetrySwitchEntityDescription( key="climate_state_auto_seat_climate_left", streaming_listener=lambda x, y: x.listen_AutoSeatClimateLeft(y), - on_func=lambda api: api.remote_auto_seat_climate_request(1, True), - off_func=lambda api: api.remote_auto_seat_climate_request(1, False), + on_func=lambda api: api.remote_auto_seat_climate_request( + AutoSeat.FRONT_LEFT, True + ), + off_func=lambda api: api.remote_auto_seat_climate_request( + AutoSeat.FRONT_LEFT, False + ), scopes=[Scope.VEHICLE_CMDS], ), TeslemetrySwitchEntityDescription( key="climate_state_auto_seat_climate_right", streaming_listener=lambda x, y: x.listen_AutoSeatClimateRight(y), - on_func=lambda api: api.remote_auto_seat_climate_request(2, True), - off_func=lambda api: api.remote_auto_seat_climate_request(2, False), + on_func=lambda api: api.remote_auto_seat_climate_request( + AutoSeat.FRONT_RIGHT, True + ), + off_func=lambda api: api.remote_auto_seat_climate_request( + AutoSeat.FRONT_RIGHT, False + ), scopes=[Scope.VEHICLE_CMDS], ), TeslemetrySwitchEntityDescription( diff --git a/homeassistant/components/teslemetry/update.py b/homeassistant/components/teslemetry/update.py index 0b0255508e0..b8d40877de4 100644 --- a/homeassistant/components/teslemetry/update.py +++ b/homeassistant/components/teslemetry/update.py @@ -5,7 +5,7 @@ from __future__ import annotations from typing import Any from tesla_fleet_api.const import Scope -from tesla_fleet_api.vehiclespecific import VehicleSpecific +from tesla_fleet_api.teslemetry import Vehicle from homeassistant.components.update import UpdateEntity, UpdateEntityFeature from homeassistant.core import HomeAssistant @@ -48,7 +48,7 @@ async def async_setup_entry( class TeslemetryUpdateEntity(TeslemetryRootEntity, UpdateEntity): """Teslemetry Updates entity.""" - api: VehicleSpecific + api: Vehicle _attr_supported_features = UpdateEntityFeature.PROGRESS async def async_install( diff --git a/homeassistant/components/tessie/__init__.py b/homeassistant/components/tessie/__init__.py index f73ecc7a729..e247931e3ba 100644 --- a/homeassistant/components/tessie/__init__.py +++ b/homeassistant/components/tessie/__init__.py @@ -5,9 +5,9 @@ from http import HTTPStatus import logging from aiohttp import ClientError, ClientResponseError -from tesla_fleet_api import EnergySpecific, Tessie from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import TeslaFleetError +from tesla_fleet_api.tessie import Tessie from tessie_api import get_state_of_all_vehicles from homeassistant.config_entries import ConfigEntry @@ -123,7 +123,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TessieConfigEntry) -> bo ) continue - api = EnergySpecific(tessie.energy, site_id) + api = tessie.energySites.create(site_id) energysites.append( TessieEnergyData( api=api, diff --git a/homeassistant/components/tessie/coordinator.py b/homeassistant/components/tessie/coordinator.py index b06fe6123a5..2382595b058 100644 --- a/homeassistant/components/tessie/coordinator.py +++ b/homeassistant/components/tessie/coordinator.py @@ -8,8 +8,8 @@ 
import logging from typing import TYPE_CHECKING, Any from aiohttp import ClientResponseError -from tesla_fleet_api import EnergySpecific from tesla_fleet_api.exceptions import InvalidToken, MissingToken, TeslaFleetError +from tesla_fleet_api.tessie import EnergySite from tessie_api import get_state, get_status from homeassistant.core import HomeAssistant @@ -102,7 +102,7 @@ class TessieEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]): config_entry: TessieConfigEntry def __init__( - self, hass: HomeAssistant, config_entry: TessieConfigEntry, api: EnergySpecific + self, hass: HomeAssistant, config_entry: TessieConfigEntry, api: EnergySite ) -> None: """Initialize Tessie Energy Site Live coordinator.""" super().__init__( @@ -138,7 +138,7 @@ class TessieEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]): config_entry: TessieConfigEntry def __init__( - self, hass: HomeAssistant, config_entry: TessieConfigEntry, api: EnergySpecific + self, hass: HomeAssistant, config_entry: TessieConfigEntry, api: EnergySite ) -> None: """Initialize Tessie Energy Info coordinator.""" super().__init__( diff --git a/homeassistant/components/tessie/manifest.json b/homeassistant/components/tessie/manifest.json index 4ddd63552f0..3f96bb226ab 100644 --- a/homeassistant/components/tessie/manifest.json +++ b/homeassistant/components/tessie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tessie", "iot_class": "cloud_polling", "loggers": ["tessie", "tesla-fleet-api"], - "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.13"] + "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==1.0.16"] } diff --git a/homeassistant/components/tessie/models.py b/homeassistant/components/tessie/models.py index ca670b9650b..03652782cfe 100644 --- a/homeassistant/components/tessie/models.py +++ b/homeassistant/components/tessie/models.py @@ -4,7 +4,7 @@ from __future__ import annotations from dataclasses import dataclass -from tesla_fleet_api import EnergySpecific +from tesla_fleet_api.tessie import EnergySite from homeassistant.helpers.device_registry import DeviceInfo @@ -27,7 +27,7 @@ class TessieData: class TessieEnergyData: """Data for a Energy Site in the Tessie integration.""" - api: EnergySpecific + api: EnergySite live_coordinator: TessieEnergySiteLiveCoordinator info_coordinator: TessieEnergySiteInfoCoordinator id: int diff --git a/homeassistant/components/tessie/number.py b/homeassistant/components/tessie/number.py index 1e857345278..77d8037fb14 100644 --- a/homeassistant/components/tessie/number.py +++ b/homeassistant/components/tessie/number.py @@ -7,7 +7,7 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api import EnergySpecific +from tesla_fleet_api.tessie import EnergySite from tessie_api import set_charge_limit, set_charging_amps, set_speed_limit from homeassistant.components.number import ( @@ -90,7 +90,7 @@ VEHICLE_DESCRIPTIONS: tuple[TessieNumberEntityDescription, ...] 
= ( class TessieNumberBatteryEntityDescription(NumberEntityDescription): """Describes Tessie Number entity.""" - func: Callable[[EnergySpecific, float], Awaitable[Any]] + func: Callable[[EnergySite, float], Awaitable[Any]] requires: str diff --git a/homeassistant/components/tessie/strings.json b/homeassistant/components/tessie/strings.json index 4f0f5f67ebd..f956e9cefd6 100644 --- a/homeassistant/components/tessie/strings.json +++ b/homeassistant/components/tessie/strings.json @@ -76,8 +76,8 @@ "state": { "starting": "Starting", "charging": "[%key:common::state::charging%]", - "disconnected": "Disconnected", - "stopped": "Stopped", + "disconnected": "[%key:common::state::disconnected%]", + "stopped": "[%key:common::state::stopped%]", "complete": "Complete", "no_power": "No power" } diff --git a/homeassistant/components/thermador/__init__.py b/homeassistant/components/thermador/__init__.py new file mode 100644 index 00000000000..2bd83b2ff71 --- /dev/null +++ b/homeassistant/components/thermador/__init__.py @@ -0,0 +1 @@ +"""Thermador virtual integration.""" diff --git a/homeassistant/components/thermador/manifest.json b/homeassistant/components/thermador/manifest.json new file mode 100644 index 00000000000..b09861623de --- /dev/null +++ b/homeassistant/components/thermador/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "thermador", + "name": "Thermador", + "integration_type": "virtual", + "supported_by": "home_connect" +} diff --git a/homeassistant/components/tibber/coordinator.py b/homeassistant/components/tibber/coordinator.py index 2de9ebd1ec6..e565fdc7dd8 100644 --- a/homeassistant/components/tibber/coordinator.py +++ b/homeassistant/components/tibber/coordinator.py @@ -9,7 +9,11 @@ from typing import cast import tibber from homeassistant.components.recorder import get_instance -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -159,7 +163,7 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]): ) metadata = StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{home.name} {sensor_type}", source=TIBBER_DOMAIN, diff --git a/homeassistant/components/traccar_server/strings.json b/homeassistant/components/traccar_server/strings.json index 8bec4b112ac..3487f41efaa 100644 --- a/homeassistant/components/traccar_server/strings.json +++ b/homeassistant/components/traccar_server/strings.json @@ -47,7 +47,7 @@ "motion": { "name": "Motion", "state": { - "off": "Stopped", + "off": "[%key:common::state::stopped%]", "on": "Moving" } }, diff --git a/homeassistant/components/tractive/device_tracker.py b/homeassistant/components/tractive/device_tracker.py index 73be7216a2f..bd1380ade4c 100644 --- a/homeassistant/components/tractive/device_tracker.py +++ b/homeassistant/components/tractive/device_tracker.py @@ -55,11 +55,9 @@ class TractiveDeviceTracker(TractiveEntity, TrackerEntity): @property def source_type(self) -> SourceType: - """Return the source type, eg gps or router, of the device.""" + """Return the source type of the device.""" if self._source_type == "PHONE": return SourceType.BLUETOOTH - if self._source_type == "KNOWN_WIFI": - return SourceType.ROUTER return SourceType.GPS @property diff --git a/homeassistant/components/tradfri/config_flow.py 
b/homeassistant/components/tradfri/config_flow.py index 9f5b39a9657..f4adb1cc09e 100644 --- a/homeassistant/components/tradfri/config_flow.py +++ b/homeassistant/components/tradfri/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from typing import Any +from typing import Any, cast from uuid import uuid4 from pytradfri import Gateway, RequestError @@ -54,7 +54,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: - host = user_input.get(CONF_HOST, self._host) + host = cast(str, user_input.get(CONF_HOST, self._host)) try: auth = await authenticate( self.hass, host, user_input[KEY_SECURITY_CODE] diff --git a/homeassistant/components/tradfri/strings.json b/homeassistant/components/tradfri/strings.json index 9ed7e167e71..66c46dd482e 100644 --- a/homeassistant/components/tradfri/strings.json +++ b/homeassistant/components/tradfri/strings.json @@ -6,7 +6,7 @@ "description": "You can find the security code on the back of your gateway.", "data": { "host": "[%key:common::config_flow::data::host%]", - "security_code": "Security Code" + "security_code": "Security code" }, "data_description": { "host": "Hostname or IP address of your Trådfri gateway." @@ -14,7 +14,7 @@ } }, "error": { - "invalid_security_code": "Failed to register with provided key. If this keeps happening, try restarting the gateway.", + "invalid_security_code": "Failed to register with provided code. If this keeps happening, try restarting the gateway.", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "timeout": "Timeout validating the code.", "cannot_authenticate": "Cannot authenticate, is Gateway paired with another server like e.g. Homekit?" diff --git a/homeassistant/components/trafikverket_ferry/config_flow.py b/homeassistant/components/trafikverket_ferry/config_flow.py index 002dc421273..dfa64ed2953 100644 --- a/homeassistant/components/trafikverket_ferry/config_flow.py +++ b/homeassistant/components/trafikverket_ferry/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from pytrafikverket import TrafikverketFerry @@ -17,6 +18,8 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_FROM, CONF_TIME, CONF_TO, DOMAIN from .util import create_unique_id +_LOGGER = logging.getLogger(__name__) + DATA_SCHEMA = vol.Schema( { vol.Required(CONF_API_KEY): selector.TextSelector( @@ -81,7 +84,8 @@ class TVFerryConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_auth" except NoFerryFound: errors["base"] = "invalid_route" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( @@ -120,7 +124,8 @@ class TVFerryConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_auth" except NoFerryFound: errors["base"] = "invalid_route" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" else: if not errors: diff --git a/homeassistant/components/trafikverket_train/config_flow.py b/homeassistant/components/trafikverket_train/config_flow.py index f6a58e464a1..eb0a4a45791 100644 --- a/homeassistant/components/trafikverket_train/config_flow.py +++ b/homeassistant/components/trafikverket_train/config_flow.py @@ -86,8 +86,8 @@ async def validate_station( except UnknownError as error: 
_LOGGER.error("Unknown error occurred during validation %s", str(error)) errors["base"] = "cannot_connect" - except Exception as error: # noqa: BLE001 - _LOGGER.error("Unknown exception occurred during validation %s", str(error)) + except Exception: + _LOGGER.exception("Unknown exception occurred during validation") errors["base"] = "cannot_connect" return (stations, errors) diff --git a/homeassistant/components/trafikverket_weatherstation/config_flow.py b/homeassistant/components/trafikverket_weatherstation/config_flow.py index f4316b887b3..ee9fe264692 100644 --- a/homeassistant/components/trafikverket_weatherstation/config_flow.py +++ b/homeassistant/components/trafikverket_weatherstation/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from pytrafikverket.exceptions import ( @@ -25,6 +26,8 @@ from homeassistant.helpers.selector import ( from .const import CONF_STATION, DOMAIN +_LOGGER = logging.getLogger(__name__) + class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Trafikverket Weatherstation integration.""" @@ -56,7 +59,8 @@ class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_station" except MultipleWeatherStationsFound: errors["base"] = "more_stations" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected error") errors["base"] = "cannot_connect" else: return self.async_create_entry( @@ -102,7 +106,8 @@ class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_station" except MultipleWeatherStationsFound: errors["base"] = "more_stations" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( @@ -132,7 +137,8 @@ class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_station" except MultipleWeatherStationsFound: errors["base"] = "more_stations" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( diff --git a/homeassistant/components/trafikverket_weatherstation/sensor.py b/homeassistant/components/trafikverket_weatherstation/sensor.py index cb923037a24..bbc6764e3ef 100644 --- a/homeassistant/components/trafikverket_weatherstation/sensor.py +++ b/homeassistant/components/trafikverket_weatherstation/sensor.py @@ -89,7 +89,8 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] 
= ( translation_key="wind_direction", value_fn=lambda data: data.winddirection, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, ), TrafikverketSensorEntityDescription( key="wind_speed", diff --git a/homeassistant/components/triggercmd/config_flow.py b/homeassistant/components/triggercmd/config_flow.py index fc02dd0b2fc..48c4eacfd5a 100644 --- a/homeassistant/components/triggercmd/config_flow.py +++ b/homeassistant/components/triggercmd/config_flow.py @@ -57,7 +57,7 @@ class TriggerCMDConfigFlow(ConfigFlow, domain=DOMAIN): errors[CONF_TOKEN] = "invalid_token" except TRIGGERcmdConnectionError: errors["base"] = "cannot_connect" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/tuya/sensor.py b/homeassistant/components/tuya/sensor.py index b1150be306a..9e40bda5d4d 100644 --- a/homeassistant/components/tuya/sensor.py +++ b/homeassistant/components/tuya/sensor.py @@ -454,6 +454,37 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), + TuyaSensorEntityDescription( + key=DPCode.VA_TEMPERATURE, + translation_key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.TEMP_CURRENT, + translation_key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.VA_HUMIDITY, + translation_key="humidity", + device_class=SensorDeviceClass.HUMIDITY, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.HUMIDITY_VALUE, + translation_key="humidity", + device_class=SensorDeviceClass.HUMIDITY, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.BRIGHT_VALUE, + translation_key="illuminance", + device_class=SensorDeviceClass.ILLUMINANCE, + state_class=SensorStateClass.MEASUREMENT, + ), + *BATTERY_SENSORS, ), # Luminance Sensor # https://developer.tuya.com/en/docs/iot/categoryldcg?id=Kaiuz3n7u69l8 @@ -801,7 +832,6 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { translation_key="total_power", device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfPower.KILO_WATT, subkey="power", ), TuyaSensorEntityDescription( diff --git a/homeassistant/components/usb/__init__.py b/homeassistant/components/usb/__init__.py index 994f4f71c35..90433b0f728 100644 --- a/homeassistant/components/usb/__init__.py +++ b/homeassistant/components/usb/__init__.py @@ -14,8 +14,6 @@ import sys from typing import Any, overload from aiousbwatcher import AIOUSBWatcher, InotifyNotAvailableError -from serial.tools.list_ports import comports -from serial.tools.list_ports_common import ListPortInfo import voluptuous as vol from homeassistant import config_entries @@ -43,7 +41,10 @@ from homeassistant.loader import USBMatcher, async_get_usb from .const import DOMAIN from .models import USBDevice -from .utils import usb_device_from_port +from .utils import ( + scan_serial_ports, + usb_device_from_port, # noqa: F401 +) _LOGGER = logging.getLogger(__name__) @@ -241,6 +242,13 @@ def _is_matching(device: USBDevice, matcher: USBMatcher | USBCallbackMatcher) 
-> return True +async def async_request_scan(hass: HomeAssistant) -> None: + """Request a USB scan.""" + usb_discovery: USBDiscovery = hass.data[DOMAIN] + if not usb_discovery.observer_active: + await usb_discovery.async_request_scan() + + class USBDiscovery: """Manage USB Discovery.""" @@ -417,14 +425,8 @@ class USBDiscovery: service_info, ) - async def _async_process_ports(self, ports: Sequence[ListPortInfo]) -> None: + async def _async_process_ports(self, usb_devices: Sequence[USBDevice]) -> None: """Process each discovered port.""" - _LOGGER.debug("Processing ports: %r", ports) - usb_devices = { - usb_device_from_port(port) - for port in ports - if port.vid is not None or port.pid is not None - } _LOGGER.debug("USB devices: %r", usb_devices) # CP2102N chips create *two* serial ports on macOS: `/dev/cu.usbserial-` and @@ -436,7 +438,7 @@ class USBDiscovery: if dev.device.startswith("/dev/cu.SLAB_USBtoUART") } - usb_devices = { + filtered_usb_devices = { dev for dev in usb_devices if dev.serial_number not in silabs_serials @@ -445,10 +447,12 @@ class USBDiscovery: and dev.device.startswith("/dev/cu.SLAB_USBtoUART") ) } + else: + filtered_usb_devices = set(usb_devices) - added_devices = usb_devices - self._last_processed_devices - removed_devices = self._last_processed_devices - usb_devices - self._last_processed_devices = usb_devices + added_devices = filtered_usb_devices - self._last_processed_devices + removed_devices = self._last_processed_devices - filtered_usb_devices + self._last_processed_devices = filtered_usb_devices _LOGGER.debug( "Added devices: %r, removed devices: %r", added_devices, removed_devices @@ -461,7 +465,7 @@ class USBDiscovery: except Exception: _LOGGER.exception("Error in USB port event callback") - for usb_device in usb_devices: + for usb_device in filtered_usb_devices: await self._async_process_discovered_usb_device(usb_device) @hass_callback @@ -483,7 +487,7 @@ class USBDiscovery: _LOGGER.debug("Executing comports scan") async with self._scan_lock: await self._async_process_ports( - await self.hass.async_add_executor_job(comports) + await self.hass.async_add_executor_job(scan_serial_ports) ) if self.initial_scan_done: return @@ -521,9 +525,7 @@ async def websocket_usb_scan( msg: dict[str, Any], ) -> None: """Scan for new usb devices.""" - usb_discovery: USBDiscovery = hass.data[DOMAIN] - if not usb_discovery.observer_active: - await usb_discovery.async_request_scan() + await async_request_scan(hass) connection.send_result(msg["id"]) diff --git a/homeassistant/components/usb/utils.py b/homeassistant/components/usb/utils.py index d1d6fb17f3c..1bb620ec5f7 100644 --- a/homeassistant/components/usb/utils.py +++ b/homeassistant/components/usb/utils.py @@ -2,6 +2,9 @@ from __future__ import annotations +from collections.abc import Sequence + +from serial.tools.list_ports import comports from serial.tools.list_ports_common import ListPortInfo from .models import USBDevice @@ -17,3 +20,12 @@ def usb_device_from_port(port: ListPortInfo) -> USBDevice: manufacturer=port.manufacturer, description=port.description, ) + + +def scan_serial_ports() -> Sequence[USBDevice]: + """Scan serial ports for USB devices.""" + return [ + usb_device_from_port(port) + for port in comports() + if port.vid is not None or port.pid is not None + ] diff --git a/homeassistant/components/vallox/config_flow.py b/homeassistant/components/vallox/config_flow.py index 30d1d153d9e..c7e6af8891a 100644 --- a/homeassistant/components/vallox/config_flow.py +++ 
b/homeassistant/components/vallox/config_flow.py @@ -108,7 +108,7 @@ class ValloxConfigFlow(ConfigFlow, domain=DOMAIN): errors[CONF_HOST] = "invalid_host" except ValloxApiException: errors[CONF_HOST] = "cannot_connect" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors[CONF_HOST] = "unknown" else: diff --git a/homeassistant/components/valve/strings.json b/homeassistant/components/valve/strings.json index b86ec371b34..39dc297fe7d 100644 --- a/homeassistant/components/valve/strings.json +++ b/homeassistant/components/valve/strings.json @@ -5,10 +5,10 @@ "name": "[%key:component::valve::title%]", "state": { "open": "[%key:common::state::open%]", - "opening": "Opening", + "opening": "[%key:common::state::opening%]", "closed": "[%key:common::state::closed%]", - "closing": "Closing", - "stopped": "Stopped" + "closing": "[%key:common::state::closing%]", + "stopped": "[%key:common::state::stopped%]" }, "state_attributes": { "current_position": { diff --git a/homeassistant/components/velbus/strings.json b/homeassistant/components/velbus/strings.json index a50395af115..35f94e54470 100644 --- a/homeassistant/components/velbus/strings.json +++ b/homeassistant/components/velbus/strings.json @@ -2,10 +2,11 @@ "config": { "step": { "user": { - "title": "Define the Velbus connection type", - "data": { - "name": "The name for this Velbus connection", - "port": "Connection string" + "title": "Define the Velbus connection", + "description": "How do you want to configure the Velbus hub?", + "menu_options": { + "network": "Via network connection", + "usbselect": "Via USB device" } }, "network": { diff --git a/homeassistant/components/venstar/strings.json b/homeassistant/components/venstar/strings.json index fdc75162651..1d916d0b8f6 100644 --- a/homeassistant/components/venstar/strings.json +++ b/homeassistant/components/venstar/strings.json @@ -32,7 +32,7 @@ "name": "Filter usage" }, "schedule_part": { - "name": "Schedule Part", + "name": "Schedule part", "state": { "morning": "Morning", "day": "Day", @@ -44,7 +44,7 @@ "active_stage": { "name": "Active stage", "state": { - "idle": "Idle", + "idle": "[%key:common::state::idle%]", "first_stage": "First stage", "second_stage": "Second stage" } diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index 04049f026bd..6ed0a2f018b 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -362,9 +362,9 @@ "ess_state": { "name": "Battery state", "state": { - "charge": "Charging", - "discharge": "Discharging", - "standby": "Standby" + "charge": "[%key:common::state::charging%]", + "discharge": "[%key:common::state::discharging%]", + "standby": "[%key:common::state::standby%]" } }, "ess_discharge_today": { @@ -412,7 +412,7 @@ "photovoltaic_status": { "name": "PV state", "state": { - "ready": "Standby", + "ready": "[%key:common::state::standby%]", "production": "Producing" } }, diff --git a/homeassistant/components/vilfo/config_flow.py b/homeassistant/components/vilfo/config_flow.py index cdba7f1b8c2..5612591c595 100644 --- a/homeassistant/components/vilfo/config_flow.py +++ b/homeassistant/components/vilfo/config_flow.py @@ -114,8 +114,8 @@ class DomainConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" - except Exception as err: # noqa: BLE001 - _LOGGER.error("Unexpected exception: %s", err) + except Exception: + 
_LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: await self.async_set_unique_id(info[CONF_ID]) diff --git a/homeassistant/components/vodafone_station/config_flow.py b/homeassistant/components/vodafone_station/config_flow.py index fd0683bdacc..6641f5f5711 100644 --- a/homeassistant/components/vodafone_station/config_flow.py +++ b/homeassistant/components/vodafone_station/config_flow.py @@ -139,6 +139,47 @@ class VodafoneStationConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the device.""" + reconfigure_entry = self._get_reconfigure_entry() + if not user_input: + return self.async_show_form( + step_id="reconfigure", data_schema=user_form_schema(user_input) + ) + + updated_host = user_input[CONF_HOST] + + if reconfigure_entry.data[CONF_HOST] != updated_host: + self._async_abort_entries_match({CONF_HOST: updated_host}) + + errors: dict[str, str] = {} + + errors = {} + + try: + await validate_input(self.hass, user_input) + except aiovodafone_exceptions.AlreadyLogged: + errors["base"] = "already_logged" + except aiovodafone_exceptions.CannotConnect: + errors["base"] = "cannot_connect" + except aiovodafone_exceptions.CannotAuthenticate: + errors["base"] = "invalid_auth" + except Exception: # noqa: BLE001 + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_update_reload_and_abort( + reconfigure_entry, data_updates={CONF_HOST: updated_host} + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=user_form_schema(user_input), + errors=errors, + ) + class VodafoneStationOptionsFlowHandler(OptionsFlow): """Handle a option flow.""" diff --git a/homeassistant/components/vodafone_station/quality_scale.yaml b/homeassistant/components/vodafone_station/quality_scale.yaml index fe114b4b324..d60020f5e47 100644 --- a/homeassistant/components/vodafone_station/quality_scale.yaml +++ b/homeassistant/components/vodafone_station/quality_scale.yaml @@ -47,20 +47,14 @@ rules: status: exempt comment: device not discoverable docs-data-update: done - docs-examples: - status: todo - comment: add some automation example + docs-examples: done docs-known-limitations: status: exempt comment: no known limitations, yet docs-supported-devices: done docs-supported-functions: done - docs-troubleshooting: - status: todo - comment: add some info for troubleshooting - docs-use-cases: - status: todo - comment: add some use caes + docs-troubleshooting: done + docs-use-cases: done dynamic-devices: done entity-category: done entity-device-class: done @@ -70,9 +64,7 @@ rules: entity-translations: done exception-translations: done icon-translations: done - reconfiguration-flow: - status: todo - comment: handle host change + reconfiguration-flow: done repair-issues: status: exempt comment: no known use cases for repair issues or flows, yet diff --git a/homeassistant/components/vodafone_station/strings.json b/homeassistant/components/vodafone_station/strings.json index 6e308c35e4f..958b774a485 100644 --- a/homeassistant/components/vodafone_station/strings.json +++ b/homeassistant/components/vodafone_station/strings.json @@ -21,12 +21,25 @@ "username": "The username for your Vodafone Station.", "password": "The password for your Vodafone Station." 
} + }, + "reconfigure": { + "data": { + "host": "[%key:common::config_flow::data::host%]", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "[%key:component::vodafone_station::config::step::user::data_description::host%]", + "username": "[%key:component::vodafone_station::config::step::user::data_description::username%]", + "password": "[%key:component::vodafone_station::config::step::user::data_description::password%]" + } } }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", "already_logged": "User already logged-in, please try again later.", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "model_not_supported": "The device model is currently unsupported.", diff --git a/homeassistant/components/watergate/__init__.py b/homeassistant/components/watergate/__init__.py index c1747af1f11..fd591215d8b 100644 --- a/homeassistant/components/watergate/__init__.py +++ b/homeassistant/components/watergate/__init__.py @@ -18,8 +18,9 @@ from homeassistant.components.webhook import ( ) from homeassistant.const import CONF_IP_ADDRESS, CONF_WEBHOOK_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers.dispatcher import async_dispatcher_send -from .const import DOMAIN +from .const import AUTO_SHUT_OFF_EVENT_NAME, DOMAIN from .coordinator import WatergateConfigEntry, WatergateDataCoordinator _LOGGER = logging.getLogger(__name__) @@ -28,8 +29,10 @@ WEBHOOK_TELEMETRY_TYPE = "telemetry" WEBHOOK_VALVE_TYPE = "valve" WEBHOOK_WIFI_CHANGED_TYPE = "wifi-changed" WEBHOOK_POWER_SUPPLY_CHANGED_TYPE = "power-supply-changed" +WEBHOOK_AUTO_SHUT_OFF = "auto-shut-off-report" PLATFORMS: list[Platform] = [ + Platform.EVENT, Platform.SENSOR, Platform.VALVE, ] @@ -120,6 +123,10 @@ def get_webhook_handler( coordinator_data.networking.rssi = data.rssi elif body_type == WEBHOOK_POWER_SUPPLY_CHANGED_TYPE: coordinator_data.state.power_supply = data.supply + elif body_type == WEBHOOK_AUTO_SHUT_OFF: + async_dispatcher_send( + hass, AUTO_SHUT_OFF_EVENT_NAME.format(data.type.lower()), data + ) coordinator.async_set_updated_data(coordinator_data) diff --git a/homeassistant/components/watergate/const.py b/homeassistant/components/watergate/const.py index 22a14330af9..c6726d9185f 100644 --- a/homeassistant/components/watergate/const.py +++ b/homeassistant/components/watergate/const.py @@ -3,3 +3,5 @@ DOMAIN = "watergate" MANUFACTURER = "Watergate" + +AUTO_SHUT_OFF_EVENT_NAME = "watergate_{}" diff --git a/homeassistant/components/watergate/event.py b/homeassistant/components/watergate/event.py new file mode 100644 index 00000000000..cf2447df4b3 --- /dev/null +++ b/homeassistant/components/watergate/event.py @@ -0,0 +1,78 @@ +"""Module contains the AutoShutOffEvent class for handling auto shut off events.""" + +from watergate_local_api.models.auto_shut_off_report import AutoShutOffReport + +from homeassistant.components.event import EventEntity, EventEntityDescription +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import WatergateConfigEntry +from .const import AUTO_SHUT_OFF_EVENT_NAME +from .coordinator import WatergateDataCoordinator +from .entity import WatergateEntity + +VOLUME_AUTO_SHUT_OFF = "volume_threshold" +DURATION_AUTO_SHUT_OFF = "duration_threshold" + + +DESCRIPTIONS: list[EventEntityDescription] = [ + EventEntityDescription( + translation_key="auto_shut_off_volume", + key="auto_shut_off_volume", + event_types=[VOLUME_AUTO_SHUT_OFF], + ), + EventEntityDescription( + translation_key="auto_shut_off_duration", + key="auto_shut_off_duration", + event_types=[DURATION_AUTO_SHUT_OFF], + ), +] + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: WatergateConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up Event entities from config entry.""" + + coordinator = config_entry.runtime_data + + async_add_entities( + AutoShutOffEvent(coordinator, description) for description in DESCRIPTIONS + ) + + +class AutoShutOffEvent(WatergateEntity, EventEntity): + """Event for Auto Shut Off.""" + + def __init__( + self, + coordinator: WatergateDataCoordinator, + entity_description: EventEntityDescription, + ) -> None: + """Initialize Auto Shut Off Entity.""" + super().__init__(coordinator, entity_description.key) + self.entity_description = entity_description + + async def async_added_to_hass(self) -> None: + """Register the callback for event handling when the entity is added.""" + await super().async_added_to_hass() + self.async_on_remove( + async_dispatcher_connect( + self.hass, + AUTO_SHUT_OFF_EVENT_NAME.format(self.event_types[0]), + self._async_handle_event, + ) + ) + + @callback + def _async_handle_event(self, event: AutoShutOffReport) -> None: + self._trigger_event( + event.type.lower(), + {"volume": event.volume, "duration": event.duration}, + ) + self.async_write_ha_state() diff --git a/homeassistant/components/watergate/icons.json b/homeassistant/components/watergate/icons.json new file mode 100644 index 00000000000..28a0bfbc825 --- /dev/null +++ b/homeassistant/components/watergate/icons.json @@ -0,0 +1,12 @@ +{ + "entity": { + "event": { + "auto_shut_off_volume": { + "default": "mdi:water" + }, + "auto_shut_off_duration": { + "default": "mdi:timelapse" + } + } + } +} diff --git a/homeassistant/components/watergate/quality_scale.yaml b/homeassistant/components/watergate/quality_scale.yaml index b116eff970e..73a39bd5264 100644 --- a/homeassistant/components/watergate/quality_scale.yaml +++ b/homeassistant/components/watergate/quality_scale.yaml @@ -17,10 +17,7 @@ rules: docs-high-level-description: done docs-installation-instructions: done docs-removal-instructions: done - entity-event-setup: - status: exempt - comment: | - Entities of this integration does not explicitly subscribe to events. 
+ entity-event-setup: done entity-unique-id: done has-entity-name: done runtime-data: done diff --git a/homeassistant/components/watergate/strings.json b/homeassistant/components/watergate/strings.json index c312525e420..634e05e7973 100644 --- a/homeassistant/components/watergate/strings.json +++ b/homeassistant/components/watergate/strings.json @@ -19,6 +19,42 @@ } }, "entity": { + "event": { + "auto_shut_off_volume": { + "name": "Volume auto shut-off", + "state_attributes": { + "event_type": { + "state": { + "volume_threshold": "Volume", + "duration_threshold": "Duration" + } + }, + "volume": { + "name": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::volume_threshold%]" + }, + "duration": { + "name": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::duration_threshold%]" + } + } + }, + "auto_shut_off_duration": { + "name": "Duration auto shut-off", + "state_attributes": { + "event_type": { + "state": { + "volume_threshold": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::volume_threshold%]", + "duration_threshold": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::duration_threshold%]" + } + }, + "volume": { + "name": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::volume_threshold%]" + }, + "duration": { + "name": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::duration_threshold%]" + } + } + } + }, "sensor": { "water_meter_volume": { "name": "Water meter volume" diff --git a/homeassistant/components/weatherflow/sensor.py b/homeassistant/components/weatherflow/sensor.py index 8eee472fe5c..10c04b3283b 100644 --- a/homeassistant/components/weatherflow/sensor.py +++ b/homeassistant/components/weatherflow/sensor.py @@ -268,6 +268,7 @@ SENSORS: tuple[WeatherFlowSensorEntityDescription, ...] 
= ( key="wind_direction", translation_key="wind_direction", device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, native_unit_of_measurement=DEGREE, event_subscriptions=[EVENT_RAPID_WIND, EVENT_OBSERVATION], raw_data_conv_fn=lambda raw_data: raw_data.magnitude, diff --git a/homeassistant/components/webdav/config_flow.py b/homeassistant/components/webdav/config_flow.py index fa1a4fe3ca9..e3e46d2575a 100644 --- a/homeassistant/components/webdav/config_flow.py +++ b/homeassistant/components/webdav/config_flow.py @@ -67,7 +67,7 @@ class WebDavConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_auth" except MethodNotSupportedError: errors["base"] = "invalid_method" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected error") errors["base"] = "unknown" else: diff --git a/homeassistant/components/webdav/manifest.json b/homeassistant/components/webdav/manifest.json index 30028cb28c9..260c569b72b 100644 --- a/homeassistant/components/webdav/manifest.json +++ b/homeassistant/components/webdav/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["aiowebdav2"], "quality_scale": "bronze", - "requirements": ["aiowebdav2==0.4.2"] + "requirements": ["aiowebdav2==0.4.4"] } diff --git a/homeassistant/components/websocket_api/const.py b/homeassistant/components/websocket_api/const.py index a0d031834ae..fce85339430 100644 --- a/homeassistant/components/websocket_api/const.py +++ b/homeassistant/components/websocket_api/const.py @@ -21,7 +21,7 @@ type AsyncWebSocketCommandHandler = Callable[ DOMAIN: Final = "websocket_api" URL: Final = "/api/websocket" PENDING_MSG_PEAK: Final = 1024 -PENDING_MSG_PEAK_TIME: Final = 5 +PENDING_MSG_PEAK_TIME: Final = 10 # Maximum number of messages that can be pending at any given time. # This is effectively the upper limit of the number of entities # that can fire state changes within ~1 second. 
diff --git a/homeassistant/components/websocket_api/messages.py b/homeassistant/components/websocket_api/messages.py index 0a8200c5700..6ae7de2c4b7 100644 --- a/homeassistant/components/websocket_api/messages.py +++ b/homeassistant/components/websocket_api/messages.py @@ -207,7 +207,7 @@ def _state_diff_event( additions[COMPRESSED_STATE_STATE] = new_state.state if old_state.last_changed != new_state.last_changed: additions[COMPRESSED_STATE_LAST_CHANGED] = new_state.last_changed_timestamp - elif old_state.last_updated != new_state.last_updated: + elif old_state.last_updated_timestamp != new_state.last_updated_timestamp: additions[COMPRESSED_STATE_LAST_UPDATED] = new_state.last_updated_timestamp if old_state_context.parent_id != new_state_context.parent_id: additions[COMPRESSED_STATE_CONTEXT] = {"parent_id": new_state_context.parent_id} diff --git a/homeassistant/components/whois/config_flow.py b/homeassistant/components/whois/config_flow.py index cb4326d996d..a8306be7632 100644 --- a/homeassistant/components/whois/config_flow.py +++ b/homeassistant/components/whois/config_flow.py @@ -11,6 +11,8 @@ from whois.exceptions import ( UnknownDateFormat, UnknownTld, WhoisCommandFailed, + WhoisPrivateRegistry, + WhoisQuotaExceeded, ) from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -48,6 +50,10 @@ class WhoisFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "unexpected_response" except UnknownDateFormat: errors["base"] = "unknown_date_format" + except WhoisPrivateRegistry: + errors["base"] = "private_registry" + except WhoisQuotaExceeded: + errors["base"] = "quota_exceeded" else: return self.async_create_entry( title=self.imported_name or user_input[CONF_DOMAIN], diff --git a/homeassistant/components/whois/strings.json b/homeassistant/components/whois/strings.json index c28c079784d..3b0f9dfd4d1 100644 --- a/homeassistant/components/whois/strings.json +++ b/homeassistant/components/whois/strings.json @@ -11,7 +11,9 @@ "unexpected_response": "Unexpected response from whois server", "unknown_date_format": "Unknown date format in whois server response", "unknown_tld": "The given TLD is unknown or not available to this integration", - "whois_command_failed": "Whois command failed: could not retrieve whois information" + "whois_command_failed": "Whois command failed: could not retrieve whois information", + "private_registry": "The given domain is registered in a private registry and cannot be monitored", + "quota_exceeded": "Your whois quota has been exceeded for this TLD" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" diff --git a/homeassistant/components/wiz/manifest.json b/homeassistant/components/wiz/manifest.json index 7b1ecdcdb6b..947e7f0b638 100644 --- a/homeassistant/components/wiz/manifest.json +++ b/homeassistant/components/wiz/manifest.json @@ -26,5 +26,5 @@ ], "documentation": "https://www.home-assistant.io/integrations/wiz", "iot_class": "local_push", - "requirements": ["pywizlight==0.5.14"] + "requirements": ["pywizlight==0.6.2"] } diff --git a/homeassistant/components/wled/select.py b/homeassistant/components/wled/select.py index e340c323151..76837652ae5 100644 --- a/homeassistant/components/wled/select.py +++ b/homeassistant/components/wled/select.py @@ -79,9 +79,10 @@ class WLEDPresetSelect(WLEDEntity, SelectEntity): super().__init__(coordinator=coordinator) self._attr_unique_id = f"{coordinator.data.info.mac_address}_preset" - self._attr_options = [ - preset.name for preset in 
self.coordinator.data.presets.values() - ] + sorted_values = sorted( + coordinator.data.presets.values(), key=lambda preset: preset.name + ) + self._attr_options = [preset.name for preset in sorted_values] @property def available(self) -> bool: @@ -115,9 +116,10 @@ class WLEDPlaylistSelect(WLEDEntity, SelectEntity): super().__init__(coordinator=coordinator) self._attr_unique_id = f"{coordinator.data.info.mac_address}_playlist" - self._attr_options = [ - playlist.name for playlist in self.coordinator.data.playlists.values() - ] + sorted_values = sorted( + coordinator.data.playlists.values(), key=lambda playlist: playlist.name + ) + self._attr_options = [playlist.name for playlist in sorted_values] @property def available(self) -> bool: @@ -159,9 +161,10 @@ class WLEDPaletteSelect(WLEDEntity, SelectEntity): self._attr_translation_placeholders = {"segment": str(segment)} self._attr_unique_id = f"{coordinator.data.info.mac_address}_palette_{segment}" - self._attr_options = [ - palette.name for palette in self.coordinator.data.palettes.values() - ] + sorted_values = sorted( + coordinator.data.palettes.values(), key=lambda palette: palette.name + ) + self._attr_options = [palette.name for palette in sorted_values] self._segment = segment @property diff --git a/homeassistant/components/wolflink/strings.json b/homeassistant/components/wolflink/strings.json index b1c332984a1..1f1eb5e310d 100644 --- a/homeassistant/components/wolflink/strings.json +++ b/homeassistant/components/wolflink/strings.json @@ -28,9 +28,9 @@ "sensor": { "state": { "state": { - "ein": "[%key:common::state::enabled%]", - "deaktiviert": "Inactive", - "aus": "[%key:common::state::disabled%]", + "ein": "[%key:common::state::on%]", + "deaktiviert": "[%key:common::state::disabled%]", + "aus": "[%key:common::state::off%]", "standby": "[%key:common::state::standby%]", "auto": "Auto", "permanent": "Permanent", diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index cc6b0f30002..b08a5ed9fff 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.68"] + "requirements": ["holidays==0.69"] } diff --git a/homeassistant/components/workday/strings.json b/homeassistant/components/workday/strings.json index 87fa294dbba..feedc52331b 100644 --- a/homeassistant/components/workday/strings.json +++ b/homeassistant/components/workday/strings.json @@ -2,13 +2,13 @@ "title": "Workday", "config": { "abort": { - "already_configured": "Workday has already been setup with chosen configuration" + "already_configured": "Workday has already been set up with chosen configuration" }, "step": { "user": { "data": { "name": "[%key:common::config_flow::data::name%]", - "country": "Country" + "country": "[%key:common::config_flow::data::country%]" } }, "options": { @@ -18,7 +18,7 @@ "days_offset": "Offset", "workdays": "Days to include", "add_holidays": "Add holidays", - "remove_holidays": "Remove Holidays", + "remove_holidays": "Remove holidays", "province": "Subdivision of country", "language": "Language for named holidays", "category": "Additional category as holiday" @@ -116,14 +116,14 @@ }, "issues": { "bad_country": { - "title": "Configured Country for {title} does not exist", + "title": "Configured country for {title} does not exist", "fix_flow": { "step": { "country": { "title": "Select country for {title}", 
"description": "Select a country to use for your Workday sensor.", "data": { - "country": "[%key:component::workday::config::step::user::data::country%]" + "country": "[%key:common::config_flow::data::country%]" } }, "province": { @@ -133,7 +133,7 @@ "province": "[%key:component::workday::config::step::options::data::province%]" }, "data_description": { - "province": "State, Territory, Province, Region of Country" + "province": "[%key:component::workday::config::step::options::data_description::province%]" } } } @@ -150,7 +150,7 @@ "province": "[%key:component::workday::config::step::options::data::province%]" }, "data_description": { - "province": "[%key:component::workday::issues::bad_country::fix_flow::step::province::data_description::province%]" + "province": "[%key:component::workday::config::step::options::data_description::province%]" } } } @@ -217,7 +217,7 @@ "services": { "check_date": { "name": "Check date", - "description": "Check if date is workday.", + "description": "Checks if a given date is a workday.", "fields": { "check_date": { "name": "Date", diff --git a/homeassistant/components/wyoming/__init__.py b/homeassistant/components/wyoming/__init__.py index d639933ece6..4e76287d8e7 100644 --- a/homeassistant/components/wyoming/__init__.py +++ b/homeassistant/components/wyoming/__init__.py @@ -8,15 +8,19 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers.typing import ConfigType from .const import ATTR_SPEAKER, DOMAIN from .data import WyomingService from .devices import SatelliteDevice from .models import DomainDataItem +from .websocket_api import async_register_websocket_api _LOGGER = logging.getLogger(__name__) +CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) + SATELLITE_PLATFORMS = [ Platform.ASSIST_SATELLITE, Platform.BINARY_SENSOR, @@ -28,11 +32,19 @@ SATELLITE_PLATFORMS = [ __all__ = [ "ATTR_SPEAKER", "DOMAIN", + "async_setup", "async_setup_entry", "async_unload_entry", ] +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the Wyoming integration.""" + async_register_websocket_api(hass) + + return True + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Load Wyoming.""" service = await WyomingService.create(entry.data["host"], entry.data["port"]) diff --git a/homeassistant/components/wyoming/strings.json b/homeassistant/components/wyoming/strings.json index 4a1a4c3a246..4480b00d867 100644 --- a/homeassistant/components/wyoming/strings.json +++ b/homeassistant/components/wyoming/strings.json @@ -40,7 +40,7 @@ "noise_suppression_level": { "name": "Noise suppression level", "state": { - "off": "Off", + "off": "[%key:common::state::off%]", "low": "Low", "medium": "Medium", "high": "High", diff --git a/homeassistant/components/wyoming/websocket_api.py b/homeassistant/components/wyoming/websocket_api.py new file mode 100644 index 00000000000..613238c302a --- /dev/null +++ b/homeassistant/components/wyoming/websocket_api.py @@ -0,0 +1,42 @@ +"""Wyoming Websocket API.""" + +import logging +from typing import Any + +import voluptuous as vol + +from homeassistant.components import websocket_api +from homeassistant.core import HomeAssistant, callback + +from .const import DOMAIN +from .models import 
DomainDataItem + +_LOGGER = logging.getLogger(__name__) + + +@callback +def async_register_websocket_api(hass: HomeAssistant) -> None: + """Register the websocket API.""" + websocket_api.async_register_command(hass, websocket_info) + + +@callback +@websocket_api.require_admin +@websocket_api.websocket_command({vol.Required("type"): "wyoming/info"}) +def websocket_info( + hass: HomeAssistant, + connection: websocket_api.connection.ActiveConnection, + msg: dict[str, Any], +) -> None: + """List service information for Wyoming all config entries.""" + entry_items: dict[str, DomainDataItem] = hass.data.get(DOMAIN, {}) + + connection.send_result( + msg["id"], + { + "info": { + entry_id: item.service.info.to_dict() + for entry_id, item in entry_items.items() + } + }, + ) diff --git a/homeassistant/components/yolink/manifest.json b/homeassistant/components/yolink/manifest.json index 52ae8281f59..8c297c68670 100644 --- a/homeassistant/components/yolink/manifest.json +++ b/homeassistant/components/yolink/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["auth", "application_credentials"], "documentation": "https://www.home-assistant.io/integrations/yolink", "iot_class": "cloud_push", - "requirements": ["yolink-api==0.4.8"] + "requirements": ["yolink-api==0.4.9"] } diff --git a/homeassistant/components/youtube/config_flow.py b/homeassistant/components/youtube/config_flow.py index 48336422585..76d74965b34 100644 --- a/homeassistant/components/youtube/config_flow.py +++ b/homeassistant/components/youtube/config_flow.py @@ -7,7 +7,6 @@ import logging from typing import Any import voluptuous as vol -from youtubeaio.helper import first from youtubeaio.types import AuthScope, ForbiddenError from youtubeaio.youtube import YouTube @@ -96,8 +95,12 @@ class OAuth2FlowHandler( """Create an entry for the flow, or update existing entry.""" try: youtube = await self.get_resource(data[CONF_TOKEN][CONF_ACCESS_TOKEN]) - own_channel = await first(youtube.get_user_channels()) - if own_channel is None or own_channel.snippet is None: + own_channels = [ + channel + async for channel in youtube.get_user_channels() + if channel.snippet is not None + ] + if not own_channels: return self.async_abort( reason="no_channel", description_placeholders={"support_url": CHANNEL_CREATION_HELP_URL}, @@ -111,10 +114,10 @@ class OAuth2FlowHandler( except Exception as ex: # noqa: BLE001 LOGGER.error("Unknown error occurred: %s", ex.args) return self.async_abort(reason="unknown") - self._title = own_channel.snippet.title + self._title = own_channels[0].snippet.title self._data = data - await self.async_set_unique_id(own_channel.channel_id) + await self.async_set_unique_id(own_channels[0].channel_id) if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() @@ -138,13 +141,39 @@ class OAuth2FlowHandler( options=user_input, ) youtube = await self.get_resource(self._data[CONF_TOKEN][CONF_ACCESS_TOKEN]) + + # Get user's own channels + own_channels = [ + channel + async for channel in youtube.get_user_channels() + if channel.snippet is not None + ] + if not own_channels: + return self.async_abort( + reason="no_channel", + description_placeholders={"support_url": CHANNEL_CREATION_HELP_URL}, + ) + + # Start with user's own channels selectable_channels = [ SelectOptionDict( - value=subscription.snippet.channel_id, - label=subscription.snippet.title, + value=channel.channel_id, + label=f"{channel.snippet.title} (Your Channel)", ) - async for subscription in youtube.get_user_subscriptions() + for channel in own_channels ] + + # Add 
subscribed channels + selectable_channels.extend( + [ + SelectOptionDict( + value=subscription.snippet.channel_id, + label=subscription.snippet.title, + ) + async for subscription in youtube.get_user_subscriptions() + ] + ) + if not selectable_channels: return self.async_abort(reason="no_subscriptions") return self.async_show_form( @@ -175,13 +204,39 @@ class YouTubeOptionsFlowHandler(OptionsFlow): await youtube.set_user_authentication( self.config_entry.data[CONF_TOKEN][CONF_ACCESS_TOKEN], [AuthScope.READ_ONLY] ) + + # Get user's own channels + own_channels = [ + channel + async for channel in youtube.get_user_channels() + if channel.snippet is not None + ] + if not own_channels: + return self.async_abort( + reason="no_channel", + description_placeholders={"support_url": CHANNEL_CREATION_HELP_URL}, + ) + + # Start with user's own channels selectable_channels = [ SelectOptionDict( - value=subscription.snippet.channel_id, - label=subscription.snippet.title, + value=channel.channel_id, + label=f"{channel.snippet.title} (Your Channel)", ) - async for subscription in youtube.get_user_subscriptions() + for channel in own_channels ] + + # Add subscribed channels + selectable_channels.extend( + [ + SelectOptionDict( + value=subscription.snippet.channel_id, + label=subscription.snippet.title, + ) + async for subscription in youtube.get_user_subscriptions() + ] + ) + return self.async_show_form( step_id="init", data_schema=self.add_suggested_values_to_schema( diff --git a/homeassistant/components/zamg/sensor.py b/homeassistant/components/zamg/sensor.py index 5846092e555..fdb9d51185c 100644 --- a/homeassistant/components/zamg/sensor.py +++ b/homeassistant/components/zamg/sensor.py @@ -82,7 +82,8 @@ SENSOR_TYPES: tuple[ZamgSensorEntityDescription, ...] = ( key="wind_bearing", name="Wind Bearing", native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, para_name="DD", ), ZamgSensorEntityDescription( diff --git a/homeassistant/components/zengge/light.py b/homeassistant/components/zengge/light.py index 2ab46820b56..ccb6733c650 100644 --- a/homeassistant/components/zengge/light.py +++ b/homeassistant/components/zengge/light.py @@ -2,138 +2,38 @@ from __future__ import annotations -import logging -from typing import Any - import voluptuous as vol -from zengge import zengge -from homeassistant.components.light import ( - ATTR_BRIGHTNESS, - ATTR_HS_COLOR, - ATTR_WHITE, - PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA, - ColorMode, - LightEntity, -) +from homeassistant.components.light import PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA from homeassistant.const import CONF_DEVICES, CONF_NAME from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv +from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util import color as color_util - -_LOGGER = logging.getLogger(__name__) DEVICE_SCHEMA = vol.Schema({vol.Optional(CONF_NAME): cv.string}) +DOMAIN = "zengge" PLATFORM_SCHEMA = LIGHT_PLATFORM_SCHEMA.extend( {vol.Optional(CONF_DEVICES, default={}): {cv.string: DEVICE_SCHEMA}} ) -def setup_platform( +async def async_setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the 
Zengge platform.""" - lights = [] - for address, device_config in config[CONF_DEVICES].items(): - light = ZenggeLight(device_config[CONF_NAME], address) - if light.is_valid: - lights.append(light) - - add_entities(lights, True) - - -class ZenggeLight(LightEntity): - """Representation of a Zengge light.""" - - _attr_supported_color_modes = {ColorMode.HS, ColorMode.WHITE} - - def __init__(self, name: str, address: str) -> None: - """Initialize the light.""" - - self._attr_name = name - self._attr_unique_id = address - self.is_valid = True - self._bulb = zengge(address) - self._white = 0 - self._attr_brightness = 0 - self._attr_hs_color = (0, 0) - self._attr_is_on = False - if self._bulb.connect() is False: - self.is_valid = False - _LOGGER.error("Failed to connect to bulb %s, %s", address, name) - return - - @property - def white_value(self) -> int: - """Return the white property.""" - return self._white - - @property - def color_mode(self) -> ColorMode: - """Return the current color mode.""" - if self._white != 0: - return ColorMode.WHITE - return ColorMode.HS - - def _set_rgb(self, red: int, green: int, blue: int) -> None: - """Set the rgb state.""" - self._bulb.set_rgb(red, green, blue) - - def _set_white(self, white): - """Set the white state.""" - return self._bulb.set_white(white) - - def turn_on(self, **kwargs: Any) -> None: - """Turn the specified light on.""" - self._attr_is_on = True - self._bulb.on() - - hs_color = kwargs.get(ATTR_HS_COLOR) - white = kwargs.get(ATTR_WHITE) - brightness = kwargs.get(ATTR_BRIGHTNESS) - - if white is not None: - # Change the bulb to white - self._attr_brightness = white - self._white = white - self._attr_hs_color = (0, 0) - - if hs_color is not None: - # Change the bulb to hs - self._white = 0 - self._attr_hs_color = hs_color - - if brightness is not None: - self._attr_brightness = brightness - - if self._white != 0: - self._set_white(self.brightness) - else: - assert self.hs_color is not None - assert self.brightness is not None - rgb = color_util.color_hsv_to_RGB( - self.hs_color[0], self.hs_color[1], self.brightness / 255 * 100 - ) - self._set_rgb(*rgb) - - def turn_off(self, **kwargs: Any) -> None: - """Turn the specified light off.""" - self._attr_is_on = False - self._bulb.off() - - def update(self) -> None: - """Synchronise internal state with the actual light state.""" - rgb = self._bulb.get_colour() - hsv = color_util.color_RGB_to_hsv(*rgb) - self._attr_hs_color = hsv[:2] - self._attr_brightness = int((hsv[2] / 100) * 255) - self._white = self._bulb.get_white() - if self._white: - self._attr_brightness = self._white - self._attr_is_on = self._bulb.get_on() + ir.async_create_issue( + hass, + DOMAIN, + DOMAIN, + is_fixable=False, + severity=ir.IssueSeverity.ERROR, + translation_key="integration_removed", + translation_placeholders={ + "led_ble_url": "https://www.home-assistant.io/integrations/led_ble/", + }, + ) diff --git a/homeassistant/components/zengge/manifest.json b/homeassistant/components/zengge/manifest.json index 03d989c5f3b..daa63b4de3d 100644 --- a/homeassistant/components/zengge/manifest.json +++ b/homeassistant/components/zengge/manifest.json @@ -5,6 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/zengge", "iot_class": "local_polling", "loggers": ["zengge"], - "quality_scale": "legacy", - "requirements": ["bluepy==1.3.0", "zengge==0.2"] + "quality_scale": "legacy" } diff --git a/homeassistant/components/zengge/strings.json b/homeassistant/components/zengge/strings.json new file mode 100644 index 
00000000000..abc3b2450aa --- /dev/null +++ b/homeassistant/components/zengge/strings.json @@ -0,0 +1,8 @@ +{ + "issues": { + "integration_removed": { + "title": "The Zengge integration has been removed", + "description": "The Zengge integration has been removed from Home Assistant. Support for Zengge lights is provided by the `led_ble` integration.\n\nTo resolve this issue, please remove the (now defunct) `zengge` light configuration from your Home Assistant configuration and [configure the `led_ble` integration]({led_ble_url})." + } + } +} diff --git a/homeassistant/components/zeroconf/__init__.py b/homeassistant/components/zeroconf/__init__.py index e80b6b8cfdb..86f8dbca792 100644 --- a/homeassistant/components/zeroconf/__init__.py +++ b/homeassistant/components/zeroconf/__init__.py @@ -145,8 +145,6 @@ def _async_get_instance(hass: HomeAssistant) -> HaAsyncZeroconf: if DOMAIN in hass.data: return cast(HaAsyncZeroconf, hass.data[DOMAIN]) - logging.getLogger("zeroconf").setLevel(logging.NOTSET) - zeroconf = HaZeroconf(**_async_get_zc_args(hass)) aio_zc = HaAsyncZeroconf(zc=zeroconf) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 6ed8b253e75..4daa2f2aa40 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["zha==0.0.53"], + "requirements": ["zha==0.0.54"], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index 23bb9ae051e..79cb05c3a0e 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -610,6 +610,12 @@ }, "flow_switch": { "name": "Flow switch" + }, + "water_leak": { + "name": "Water leak" + }, + "water_supply": { + "name": "Water supply" } }, "button": { @@ -1101,6 +1107,27 @@ }, "shutdown_timer": { "name": "Shutdown timer" + }, + "calibration_vertical_run_time_up": { + "name": "Calibration vertical run time up" + }, + "calibration_vertical_run_time_down": { + "name": "Calibration vertical run time down" + }, + "calibration_rotation_run_time_up": { + "name": "Calibration rotation run time up" + }, + "calibration_rotation_run_time_down": { + "name": "Calibration rotation run time down" + }, + "impulse_mode_duration": { + "name": "Impulse mode duration" + }, + "water_duration": { + "name": "Water duration" + }, + "water_interval": { + "name": "Water interval" } }, "select": { @@ -1319,6 +1346,9 @@ }, "hysteresis_mode": { "name": "Hysteresis mode" + }, + "speed": { + "name": "Speed" } }, "sensor": { @@ -1457,7 +1487,7 @@ "adaptation_run_status": { "name": "Adaptation run status", "state": { - "nothing": "Idle", + "nothing": "[%key:common::state::idle%]", "something": "State" }, "state_attributes": { @@ -1666,6 +1696,9 @@ }, "last_watering_duration": { "name": "Last watering duration" + }, + "device_status": { + "name": "Device status" } }, "switch": { diff --git a/homeassistant/components/zwave_js/config_flow.py b/homeassistant/components/zwave_js/config_flow.py index aed0dd839be..d95f3208e17 100644 --- a/homeassistant/components/zwave_js/config_flow.py +++ b/homeassistant/components/zwave_js/config_flow.py @@ -21,19 +21,16 @@ from homeassistant.components.hassio import ( ) from homeassistant.config_entries import ( SOURCE_USB, - ConfigEntriesFlowManager, ConfigEntry, ConfigEntryBaseFlow, ConfigEntryState, ConfigFlow, - ConfigFlowContext, ConfigFlowResult, OptionsFlow, - 
OptionsFlowManager, ) from homeassistant.const import CONF_NAME, CONF_URL from homeassistant.core import HomeAssistant, callback -from homeassistant.data_entry_flow import AbortFlow, FlowManager +from homeassistant.data_entry_flow import AbortFlow from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.hassio import is_hassio @@ -191,11 +188,6 @@ class BaseZwaveJSFlow(ConfigEntryBaseFlow, ABC): self.start_task: asyncio.Task | None = None self.version_info: VersionInfo | None = None - @property - @abstractmethod - def flow_manager(self) -> FlowManager[ConfigFlowContext, ConfigFlowResult]: - """Return the flow manager of the flow.""" - async def async_step_install_addon( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -355,11 +347,6 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): self.use_addon = False self._usb_discovery = False - @property - def flow_manager(self) -> ConfigEntriesFlowManager: - """Return the correct flow manager.""" - return self.hass.config_entries.flow - @staticmethod @callback def async_get_options_flow( @@ -729,11 +716,6 @@ class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow): self.original_addon_config: dict[str, Any] | None = None self.revert_reason: str | None = None - @property - def flow_manager(self) -> OptionsFlowManager: - """Return the correct flow manager.""" - return self.hass.config_entries.options - @callback def _async_update_entry(self, data: dict[str, Any]) -> None: """Update the config entry with new data.""" diff --git a/homeassistant/components/zwave_js/light.py b/homeassistant/components/zwave_js/light.py index a610bbcb91e..f60e129cc77 100644 --- a/homeassistant/components/zwave_js/light.py +++ b/homeassistant/components/zwave_js/light.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import ( @@ -483,7 +483,7 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): red = multi_color.get(COLOR_SWITCH_COMBINED_RED, red_val.value) green = multi_color.get(COLOR_SWITCH_COMBINED_GREEN, green_val.value) blue = multi_color.get(COLOR_SWITCH_COMBINED_BLUE, blue_val.value) - if None not in (red, green, blue): + if red is not None and green is not None and blue is not None: # convert to HS self._hs_color = color_util.color_RGB_to_hs(red, green, blue) # Light supports color, set color mode to hs @@ -496,7 +496,8 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): # Calculate color temps based on whites if cold_white or warm_white: self._color_temp = color_util.color_temperature_mired_to_kelvin( - MAX_MIREDS - ((cold_white / 255) * (MAX_MIREDS - MIN_MIREDS)) + MAX_MIREDS + - ((cast(int, cold_white) / 255) * (MAX_MIREDS - MIN_MIREDS)) ) # White channels turned on, set color mode to color_temp self._color_mode = ColorMode.COLOR_TEMP @@ -505,6 +506,13 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): # only one white channel (warm white) = rgbw support elif red_val and green_val and blue_val and ww_val: white = multi_color.get(COLOR_SWITCH_COMBINED_WARM_WHITE, ww_val.value) + if TYPE_CHECKING: + assert ( + red is not None + and green is not None + and blue is not None + and white is not None + ) self._rgbw_color = (red, green, blue, white) # Light supports rgbw, set color mode to rgbw self._color_mode = ColorMode.RGBW @@ -512,6 +520,13 @@ class 
ZwaveLight(ZWaveBaseEntity, LightEntity): elif cw_val: self._supports_rgbw = True white = multi_color.get(COLOR_SWITCH_COMBINED_COLD_WHITE, cw_val.value) + if TYPE_CHECKING: + assert ( + red is not None + and green is not None + and blue is not None + and white is not None + ) self._rgbw_color = (red, green, blue, white) # Light supports rgbw, set color mode to rgbw self._color_mode = ColorMode.RGBW diff --git a/homeassistant/const.py b/homeassistant/const.py index b9695c350a7..a6f39db8532 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2025 -MINOR_VERSION: Final = 4 +MINOR_VERSION: Final = 5 PATCH_VERSION: Final = "0.dev0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" diff --git a/homeassistant/data_entry_flow.py b/homeassistant/data_entry_flow.py index 7d2ef09ecb8..f7be891b61b 100644 --- a/homeassistant/data_entry_flow.py +++ b/homeassistant/data_entry_flow.py @@ -666,7 +666,7 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]): new_section_key = copy.copy(key) schema[new_section_key] = val val.schema = self.add_suggested_values_to_schema( - copy.deepcopy(val.schema), suggested_values[key] + val.schema, suggested_values[key] ) continue diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index a9c4a6b0a93..d192b8fcd13 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -91,6 +91,7 @@ FLOWS = { "bluetooth", "bmw_connected_drive", "bond", + "bosch_alarm", "bosch_shc", "braviatv", "bring", @@ -488,6 +489,7 @@ FLOWS = { "proximity", "prusalink", "ps4", + "pterodactyl", "pure_energie", "purpleair", "pushbullet", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 55fcb08ba92..7bc76a28284 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -611,6 +611,13 @@ "config_flow": true, "iot_class": "local_push" }, + "backup": { + "name": "Backup", + "integration_type": "service", + "config_flow": false, + "iot_class": "calculated", + "single_config_entry": true + }, "baf": { "name": "Big Ass Fans", "integration_type": "hub", @@ -623,6 +630,11 @@ "config_flow": false, "iot_class": "cloud_push" }, + "balay": { + "name": "Balay", + "integration_type": "virtual", + "supported_by": "home_connect" + }, "balboa": { "name": "Balboa Spa Client", "integration_type": "hub", @@ -752,11 +764,28 @@ "config_flow": true, "iot_class": "local_push" }, - "bosch_shc": { - "name": "Bosch SHC", - "integration_type": "hub", - "config_flow": true, - "iot_class": "local_push" + "bosch": { + "name": "Bosch", + "integrations": { + "bosch_alarm": { + "integration_type": "device", + "config_flow": true, + "iot_class": "local_push", + "name": "Bosch Alarm" + }, + "bosch_shc": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_push", + "name": "Bosch SHC" + }, + "home_connect": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push", + "name": "Home Connect" + } + } }, "brandt": { "name": "Brandt Smart Control", @@ -1042,6 +1071,11 @@ "integration_type": "virtual", "supported_by": "opower" }, + "constructa": { + "name": "Constructa", + "integration_type": "virtual", + "supported_by": "home_connect" + }, "control4": { "name": "Control4", "integration_type": "hub", @@ -2150,6 
+2184,11 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "gaggenau": { + "name": "Gaggenau", + "integration_type": "virtual", + "supported_by": "home_connect" + }, "garadget": { "name": "Garadget", "integration_type": "hub", @@ -2489,6 +2528,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "hardkernel": { + "name": "Hardkernel", + "integration_type": "hardware", + "config_flow": false, + "single_config_entry": true + }, "harman_kardon_avr": { "name": "Harman Kardon AVR", "integration_type": "hub", @@ -2620,18 +2665,28 @@ "config_flow": true, "iot_class": "local_polling" }, - "home_connect": { - "name": "Home Connect", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_push", - "single_config_entry": true - }, "home_plus_control": { "name": "Legrand Home+ Control", "integration_type": "virtual", "supported_by": "netatmo" }, + "homeassistant_green": { + "name": "Home Assistant Green", + "integration_type": "hardware", + "config_flow": false, + "single_config_entry": true + }, + "homeassistant_sky_connect": { + "name": "Home Assistant Connect ZBT-1", + "integration_type": "hardware", + "config_flow": true + }, + "homeassistant_yellow": { + "name": "Home Assistant Yellow", + "integration_type": "hardware", + "config_flow": false, + "single_config_entry": true + }, "homee": { "name": "Homee", "integration_type": "hub", @@ -4000,7 +4055,10 @@ "iot_class": "assumed_state", "name": "Motionblinds Bluetooth" } - } + }, + "iot_standards": [ + "matter" + ] }, "motioneye": { "name": "motionEye", @@ -4158,6 +4216,11 @@ "config_flow": false, "iot_class": "cloud_polling" }, + "neff": { + "name": "Neff", + "integration_type": "virtual", + "supported_by": "home_connect" + }, "ness_alarm": { "name": "Ness Alarm", "integration_type": "hub", @@ -4851,6 +4914,11 @@ "integration_type": "virtual", "supported_by": "wyoming" }, + "pitsos": { + "name": "Pitsos", + "integration_type": "virtual", + "supported_by": "home_connect" + }, "pjlink": { "name": "PJLink", "integration_type": "hub", @@ -4926,6 +4994,11 @@ "config_flow": true, "single_config_entry": true }, + "profilo": { + "name": "Profilo", + "integration_type": "virtual", + "supported_by": "home_connect" + }, "progettihwsw": { "name": "ProgettiHWSW Automation", "integration_type": "hub", @@ -4988,6 +5061,12 @@ "integration_type": "virtual", "supported_by": "opower" }, + "pterodactyl": { + "name": "Pterodactyl", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling" + }, "pulseaudio_loopback": { "name": "PulseAudio Loopback", "integration_type": "hub", @@ -5192,6 +5271,11 @@ "raspberry_pi": { "name": "Raspberry Pi", "integrations": { + "raspberry_pi": { + "integration_type": "hardware", + "config_flow": false, + "name": "Raspberry Pi" + }, "rpi_camera": { "integration_type": "hub", "config_flow": false, @@ -5727,6 +5811,11 @@ "config_flow": true, "iot_class": "local_push" }, + "siemens": { + "name": "Siemens", + "integration_type": "virtual", + "supported_by": "home_connect" + }, "sigfox": { "name": "Sigfox", "integration_type": "hub", @@ -6479,6 +6568,11 @@ "config_flow": false, "iot_class": "local_polling" }, + "thermador": { + "name": "Thermador", + "integration_type": "virtual", + "supported_by": "home_connect" + }, "thermobeacon": { "name": "ThermoBeacon", "integration_type": "hub", diff --git a/homeassistant/helpers/area_registry.py b/homeassistant/helpers/area_registry.py index 5601ce4032d..ba02ed51f6b 100644 --- a/homeassistant/helpers/area_registry.py +++ 
b/homeassistant/helpers/area_registry.py @@ -20,6 +20,7 @@ from .json import json_bytes, json_fragment from .normalized_name_base_registry import ( NormalizedNameBaseRegistryEntry, NormalizedNameBaseRegistryItems, + normalize_name, ) from .registry import BaseRegistry, RegistryIndexType from .singleton import singleton @@ -169,6 +170,7 @@ class AreaRegistryItems(NormalizedNameBaseRegistryItems[AreaEntry]): super().__init__() self._labels_index: RegistryIndexType = defaultdict(dict) self._floors_index: RegistryIndexType = defaultdict(dict) + self._aliases_index: RegistryIndexType = defaultdict(dict) def _index_entry(self, key: str, entry: AreaEntry) -> None: """Index an entry.""" @@ -177,6 +179,9 @@ class AreaRegistryItems(NormalizedNameBaseRegistryItems[AreaEntry]): self._floors_index[entry.floor_id][key] = True for label in entry.labels: self._labels_index[label][key] = True + for alias in entry.aliases: + normalized_alias = normalize_name(alias) + self._aliases_index[normalized_alias][key] = True def _unindex_entry( self, key: str, replacement_entry: AreaEntry | None = None @@ -184,6 +189,10 @@ class AreaRegistryItems(NormalizedNameBaseRegistryItems[AreaEntry]): # always call base class before other indices super()._unindex_entry(key, replacement_entry) entry = self.data[key] + if aliases := entry.aliases: + for alias in aliases: + normalized_alias = normalize_name(alias) + self._unindex_entry_value(key, normalized_alias, self._aliases_index) if labels := entry.labels: for label in labels: self._unindex_entry_value(key, label, self._labels_index) @@ -200,6 +209,12 @@ class AreaRegistryItems(NormalizedNameBaseRegistryItems[AreaEntry]): data = self.data return [data[key] for key in self._floors_index.get(floor, ())] + def get_areas_for_alias(self, alias: str) -> list[AreaEntry]: + """Get areas for alias.""" + data = self.data + normalized_alias = normalize_name(alias) + return [data[key] for key in self._aliases_index.get(normalized_alias, ())] + class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): """Class to hold a registry of areas.""" @@ -232,6 +247,11 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): """Get area by name.""" return self.areas.get_by_name(name) + @callback + def async_get_areas_by_alias(self, alias: str) -> list[AreaEntry]: + """Get areas by alias.""" + return self.areas.get_areas_for_alias(alias) + @callback def async_list_areas(self) -> Iterable[AreaEntry]: """Get all areas.""" diff --git a/homeassistant/helpers/backup.py b/homeassistant/helpers/backup.py index 4ab302749a1..b3607f6653c 100644 --- a/homeassistant/helpers/backup.py +++ b/homeassistant/helpers/backup.py @@ -12,7 +12,11 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.util.hass_dict import HassKey if TYPE_CHECKING: - from homeassistant.components.backup import BackupManager, ManagerStateEvent + from homeassistant.components.backup import ( + BackupManager, + BackupPlatformEvent, + ManagerStateEvent, + ) DATA_BACKUP: HassKey[BackupData] = HassKey("backup_data") DATA_MANAGER: HassKey[BackupManager] = HassKey("backup") @@ -25,6 +29,9 @@ class BackupData: backup_event_subscriptions: list[Callable[[ManagerStateEvent], None]] = field( default_factory=list ) + backup_platform_event_subscriptions: list[Callable[[BackupPlatformEvent], None]] = ( + field(default_factory=list) + ) manager_ready: asyncio.Future[None] = field(default_factory=asyncio.Future) @@ -68,3 +75,20 @@ def async_subscribe_events( backup_event_subscriptions.append(on_event) return remove_subscription 
+ + +@callback +def async_subscribe_platform_events( + hass: HomeAssistant, + on_event: Callable[[BackupPlatformEvent], None], +) -> Callable[[], None]: + """Subscribe to backup platform events.""" + backup_platform_event_subscriptions = hass.data[ + DATA_BACKUP + ].backup_platform_event_subscriptions + + def remove_subscription() -> None: + backup_platform_event_subscriptions.remove(on_event) + + backup_platform_event_subscriptions.append(on_event) + return remove_subscription diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 4978158c0f6..5c1a7c99565 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -1153,41 +1153,6 @@ def _custom_serializer(schema: Any, *, allow_section: bool) -> Any: return voluptuous_serialize.UNSUPPORTED -def expand_condition_shorthand(value: Any | None) -> Any: - """Expand boolean condition shorthand notations.""" - - if not isinstance(value, dict) or CONF_CONDITIONS in value: - return value - - for key, schema in ( - ("and", AND_CONDITION_SHORTHAND_SCHEMA), - ("or", OR_CONDITION_SHORTHAND_SCHEMA), - ("not", NOT_CONDITION_SHORTHAND_SCHEMA), - ): - try: - schema(value) - return { - CONF_CONDITION: key, - CONF_CONDITIONS: value[key], - **{k: value[k] for k in value if k != key}, - } - except vol.MultipleInvalid: - pass - - if isinstance(value.get(CONF_CONDITION), list): - try: - CONDITION_SHORTHAND_SCHEMA(value) - return { - CONF_CONDITION: "and", - CONF_CONDITIONS: value[CONF_CONDITION], - **{k: value[k] for k in value if k != CONF_CONDITION}, - } - except vol.MultipleInvalid: - pass - - return value - - # Schemas def empty_config_schema(domain: str) -> Callable[[dict], dict]: """Return a config schema which logs if there are configuration parameters.""" @@ -1683,7 +1648,43 @@ DEVICE_CONDITION_BASE_SCHEMA = vol.Schema( DEVICE_CONDITION_SCHEMA = DEVICE_CONDITION_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA) -dynamic_template_condition_action = vol.All( + +def expand_condition_shorthand(value: Any | None) -> Any: + """Expand boolean condition shorthand notations.""" + + if not isinstance(value, dict) or CONF_CONDITIONS in value: + return value + + for key, schema in ( + ("and", AND_CONDITION_SHORTHAND_SCHEMA), + ("or", OR_CONDITION_SHORTHAND_SCHEMA), + ("not", NOT_CONDITION_SHORTHAND_SCHEMA), + ): + try: + schema(value) + return { + CONF_CONDITION: key, + CONF_CONDITIONS: value[key], + **{k: value[k] for k in value if k != key}, + } + except vol.MultipleInvalid: + pass + + if isinstance(value.get(CONF_CONDITION), list): + try: + CONDITION_SHORTHAND_SCHEMA(value) + return { + CONF_CONDITION: "and", + CONF_CONDITIONS: value[CONF_CONDITION], + **{k: value[k] for k in value if k != CONF_CONDITION}, + } + except vol.MultipleInvalid: + pass + + return value + + +dynamic_template_condition = vol.All( # Wrap a shorthand template condition in a template condition dynamic_template, lambda config: { @@ -1724,7 +1725,7 @@ CONDITION_SCHEMA: vol.Schema = vol.Schema( }, ), ), - dynamic_template_condition_action, + dynamic_template_condition, ) ) @@ -1873,12 +1874,8 @@ _SCRIPT_REPEAT_SCHEMA = vol.Schema( vol.Exclusive(CONF_FOR_EACH, "repeat"): vol.Any( dynamic_template, vol.All(list, template_complex) ), - vol.Exclusive(CONF_WHILE, "repeat"): vol.All( - ensure_list, [CONDITION_SCHEMA] - ), - vol.Exclusive(CONF_UNTIL, "repeat"): vol.All( - ensure_list, [CONDITION_SCHEMA] - ), + vol.Exclusive(CONF_WHILE, "repeat"): CONDITIONS_SCHEMA, + vol.Exclusive(CONF_UNTIL, "repeat"): 
CONDITIONS_SCHEMA, vol.Required(CONF_SEQUENCE): SCRIPT_SCHEMA, }, has_at_least_one_key(CONF_COUNT, CONF_FOR_EACH, CONF_WHILE, CONF_UNTIL), @@ -1894,9 +1891,7 @@ _SCRIPT_CHOOSE_SCHEMA = vol.Schema( [ { vol.Optional(CONF_ALIAS): string, - vol.Required(CONF_CONDITIONS): vol.All( - ensure_list, [CONDITION_SCHEMA] - ), + vol.Required(CONF_CONDITIONS): CONDITIONS_SCHEMA, vol.Required(CONF_SEQUENCE): SCRIPT_SCHEMA, } ], @@ -1917,7 +1912,7 @@ _SCRIPT_WAIT_FOR_TRIGGER_SCHEMA = vol.Schema( _SCRIPT_IF_SCHEMA = vol.Schema( { **SCRIPT_ACTION_BASE_SCHEMA, - vol.Required(CONF_IF): vol.All(ensure_list, [CONDITION_SCHEMA]), + vol.Required(CONF_IF): CONDITIONS_SCHEMA, vol.Required(CONF_THEN): SCRIPT_SCHEMA, vol.Optional(CONF_ELSE): SCRIPT_SCHEMA, } diff --git a/homeassistant/helpers/device_registry.py b/homeassistant/helpers/device_registry.py index 991a6cf5a57..79d6774c407 100644 --- a/homeassistant/helpers/device_registry.py +++ b/homeassistant/helpers/device_registry.py @@ -581,8 +581,8 @@ class DeviceRegistryItems[_EntryTypeT: (DeviceEntry, DeletedDeviceEntry)]( def get_entry( self, - identifiers: set[tuple[str, str]] | None, - connections: set[tuple[str, str]] | None, + identifiers: set[tuple[str, str]] | None = None, + connections: set[tuple[str, str]] | None = None, ) -> _EntryTypeT | None: """Get entry from identifiers or connections.""" if identifiers: @@ -709,22 +709,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): """Check if device is registered.""" return self.devices.get_entry(identifiers, connections) - def _async_get_deleted_device( - self, - identifiers: set[tuple[str, str]], - connections: set[tuple[str, str]], - ) -> DeletedDeviceEntry | None: - """Check if device is deleted.""" - return self.deleted_devices.get_entry(identifiers, connections) - - def _async_get_deleted_devices( - self, - identifiers: set[tuple[str, str]] | None = None, - connections: set[tuple[str, str]] | None = None, - ) -> Iterable[DeletedDeviceEntry]: - """List devices that are deleted.""" - return self.deleted_devices.get_entries(identifiers, connections) - def _substitute_name_placeholders( self, domain: str, @@ -839,10 +823,12 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): else: connections = _normalize_connections(connections) - device = self.async_get_device(identifiers=identifiers, connections=connections) + device = self.devices.get_entry( + identifiers=identifiers, connections=connections + ) if device is None: - deleted_device = self._async_get_deleted_device(identifiers, connections) + deleted_device = self.deleted_devices.get_entry(identifiers, connections) if deleted_device is None: device = DeviceEntry(is_new=True) else: @@ -869,7 +855,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): name = default_name if via_device is not None and via_device is not UNDEFINED: - if (via := self.async_get_device(identifiers={via_device})) is None: + if (via := self.devices.get_entry(identifiers={via_device})) is None: report_usage( "calls `device_registry.async_get_or_create` referencing a " f"non existing `via_device` {via_device}, " @@ -1172,7 +1158,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): # NOTE: Once we solve the broader issue of duplicated devices, we might # want to revisit it. Instead of simply removing the duplicated deleted device, # we might want to merge the information from it into the non-deleted device. 
- for deleted_device in self._async_get_deleted_devices( + for deleted_device in self.deleted_devices.get_entries( added_identifiers, added_connections ): del self.deleted_devices[deleted_device.id] @@ -1214,7 +1200,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): # conflict, the index will only see the last one and we will not # be able to tell which one caused the conflict if ( - existing_device := self.async_get_device(connections={connection}) + existing_device := self.devices.get_entry(connections={connection}) ) and existing_device.id != device_id: raise DeviceConnectionCollisionError( normalized_connections, existing_device @@ -1238,7 +1224,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): # conflict, the index will only see the last one and we will not # be able to tell which one caused the conflict if ( - existing_device := self.async_get_device(identifiers={identifier}) + existing_device := self.devices.get_entry(identifiers={identifier}) ) and existing_device.id != device_id: raise DeviceIdentifierCollisionError(identifiers, existing_device) diff --git a/homeassistant/helpers/floor_registry.py b/homeassistant/helpers/floor_registry.py index fcfca8e3212..186ad2b31f7 100644 --- a/homeassistant/helpers/floor_registry.py +++ b/homeassistant/helpers/floor_registry.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections import defaultdict from collections.abc import Iterable import dataclasses from dataclasses import dataclass @@ -16,8 +17,9 @@ from homeassistant.util.hass_dict import HassKey from .normalized_name_base_registry import ( NormalizedNameBaseRegistryEntry, NormalizedNameBaseRegistryItems, + normalize_name, ) -from .registry import BaseRegistry +from .registry import BaseRegistry, RegistryIndexType from .singleton import singleton from .storage import Store from .typing import UNDEFINED, UndefinedType @@ -92,10 +94,43 @@ class FloorRegistryStore(Store[FloorRegistryStoreData]): return old_data # type: ignore[return-value] +class FloorRegistryItems(NormalizedNameBaseRegistryItems[FloorEntry]): + """Class to hold floor registry items.""" + + def __init__(self) -> None: + """Initialize the floor registry items.""" + super().__init__() + self._aliases_index: RegistryIndexType = defaultdict(dict) + + def _index_entry(self, key: str, entry: FloorEntry) -> None: + """Index an entry.""" + super()._index_entry(key, entry) + for alias in entry.aliases: + normalized_alias = normalize_name(alias) + self._aliases_index[normalized_alias][key] = True + + def _unindex_entry( + self, key: str, replacement_entry: FloorEntry | None = None + ) -> None: + # always call base class before other indices + super()._unindex_entry(key, replacement_entry) + entry = self.data[key] + if aliases := entry.aliases: + for alias in aliases: + normalized_alias = normalize_name(alias) + self._unindex_entry_value(key, normalized_alias, self._aliases_index) + + def get_floors_for_alias(self, alias: str) -> list[FloorEntry]: + """Get floors for alias.""" + data = self.data + normalized_alias = normalize_name(alias) + return [data[key] for key in self._aliases_index.get(normalized_alias, ())] + + class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): """Class to hold a registry of floors.""" - floors: NormalizedNameBaseRegistryItems[FloorEntry] + floors: FloorRegistryItems _floor_data: dict[str, FloorEntry] def __init__(self, hass: HomeAssistant) -> None: @@ -123,6 +158,11 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): """Get floor 
by name.""" return self.floors.get_by_name(name) + @callback + def async_get_floors_by_alias(self, alias: str) -> list[FloorEntry]: + """Get floors by alias.""" + return self.floors.get_floors_for_alias(alias) + @callback def async_list_floors(self) -> Iterable[FloorEntry]: """Get all floors.""" @@ -226,7 +266,7 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): async def async_load(self) -> None: """Load the floor registry.""" data = await self._store.async_load() - floors = NormalizedNameBaseRegistryItems[FloorEntry]() + floors = FloorRegistryItems() if data is not None: for floor in data["floors"]: diff --git a/homeassistant/helpers/httpx_client.py b/homeassistant/helpers/httpx_client.py index ade2ce747d5..49b12e0aa60 100644 --- a/homeassistant/helpers/httpx_client.py +++ b/homeassistant/helpers/httpx_client.py @@ -7,6 +7,9 @@ import sys from types import TracebackType from typing import Any, Self +# httpx dynamically imports httpcore, so we need to import it +# to avoid it being imported later when the event loop is running +import httpcore # noqa: F401 import httpx from homeassistant.const import APPLICATION_NAME, EVENT_HOMEASSISTANT_CLOSE, __version__ diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index 1242ef3e4d5..43429bdb1d2 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -1311,7 +1311,7 @@ class _QueuedScriptRun(_ScriptRun): lock_acquired = False - async def async_run(self) -> None: + async def async_run(self) -> ScriptRunResult | None: """Run script.""" # Wait for previous run, if any, to finish by attempting to acquire the script's # shared lock. At the same time monitor if we've been told to stop. @@ -1325,7 +1325,7 @@ class _QueuedScriptRun(_ScriptRun): self.lock_acquired = True # We've acquired the lock so we can go ahead and start the run. 
-        await super().async_run()
+        return await super().async_run()
 
     def _finish(self) -> None:
         if self.lock_acquired:
diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py
index 0d017dda64f..9468eb6bf49 100644
--- a/homeassistant/helpers/template.py
+++ b/homeassistant/helpers/template.py
@@ -1478,10 +1478,14 @@ def floors(hass: HomeAssistant) -> Iterable[str | None]:
 
 
 def floor_id(hass: HomeAssistant, lookup_value: Any) -> str | None:
-    """Get the floor ID from a floor name."""
+    """Get the floor ID from a floor or area name, alias, device id, or entity id."""
     floor_registry = fr.async_get(hass)
-    if floor := floor_registry.async_get_floor_by_name(str(lookup_value)):
+    lookup_str = str(lookup_value)
+    if floor := floor_registry.async_get_floor_by_name(lookup_str):
         return floor.floor_id
+    floors_list = floor_registry.async_get_floors_by_alias(lookup_str)
+    if floors_list:
+        return floors_list[0].floor_id
 
     if aid := area_id(hass, lookup_value):
         area_reg = area_registry.async_get(hass)
@@ -1541,10 +1545,14 @@ def areas(hass: HomeAssistant) -> Iterable[str | None]:
 
 
 def area_id(hass: HomeAssistant, lookup_value: str) -> str | None:
-    """Get the area ID from an area name, device id, or entity id."""
+    """Get the area ID from an area name, alias, device id, or entity id."""
    area_reg = area_registry.async_get(hass)
-    if area := area_reg.async_get_area_by_name(str(lookup_value)):
+    lookup_str = str(lookup_value)
+    if area := area_reg.async_get_area_by_name(lookup_str):
         return area.id
+    areas_list = area_reg.async_get_areas_by_alias(lookup_str)
+    if areas_list:
+        return areas_list[0].id
 
     ent_reg = entity_registry.async_get(hass)
     dev_reg = device_registry.async_get(hass)
@@ -2785,6 +2793,50 @@ def flatten(value: Iterable[Any], levels: int | None = None) -> list[Any]:
     return flattened
 
 
+def intersect(value: Iterable[Any], other: Iterable[Any]) -> list[Any]:
+    """Return the common elements between two lists."""
+    if not isinstance(value, Iterable) or isinstance(value, str):
+        raise TypeError(f"intersect expected a list, got {type(value).__name__}")
+    if not isinstance(other, Iterable) or isinstance(other, str):
+        raise TypeError(f"intersect expected a list, got {type(other).__name__}")
+
+    return list(set(value) & set(other))
+
+
+def difference(value: Iterable[Any], other: Iterable[Any]) -> list[Any]:
+    """Return elements in first list that are not in second list."""
+    if not isinstance(value, Iterable) or isinstance(value, str):
+        raise TypeError(f"difference expected a list, got {type(value).__name__}")
+    if not isinstance(other, Iterable) or isinstance(other, str):
+        raise TypeError(f"difference expected a list, got {type(other).__name__}")
+
+    return list(set(value) - set(other))
+
+
+def union(value: Iterable[Any], other: Iterable[Any]) -> list[Any]:
+    """Return all unique elements from both lists combined."""
+    if not isinstance(value, Iterable) or isinstance(value, str):
+        raise TypeError(f"union expected a list, got {type(value).__name__}")
+    if not isinstance(other, Iterable) or isinstance(other, str):
+        raise TypeError(f"union expected a list, got {type(other).__name__}")
+
+    return list(set(value) | set(other))
+
+
+def symmetric_difference(value: Iterable[Any], other: Iterable[Any]) -> list[Any]:
+    """Return elements that are in either list but not in both."""
+    if not isinstance(value, Iterable) or isinstance(value, str):
+        raise TypeError(
+            f"symmetric_difference expected a list, got {type(value).__name__}"
+        )
+    if not isinstance(other, Iterable) or
isinstance(other, str): + raise TypeError( + f"symmetric_difference expected a list, got {type(other).__name__}" + ) + + return list(set(value) ^ set(other)) + + def combine(*args: Any, recursive: bool = False) -> dict[Any, Any]: """Combine multiple dictionaries into one.""" if not args: @@ -2996,11 +3048,13 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["bool"] = forgiving_boolean self.globals["combine"] = combine self.globals["cos"] = cosine + self.globals["difference"] = difference self.globals["e"] = math.e self.globals["flatten"] = flatten self.globals["float"] = forgiving_float self.globals["iif"] = iif self.globals["int"] = forgiving_int + self.globals["intersect"] = intersect self.globals["is_number"] = is_number self.globals["log"] = logarithm self.globals["max"] = min_max_from_filter(self.filters["max"], "max") @@ -3020,11 +3074,13 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["sqrt"] = square_root self.globals["statistical_mode"] = statistical_mode self.globals["strptime"] = strptime + self.globals["symmetric_difference"] = symmetric_difference self.globals["tan"] = tangent self.globals["tau"] = math.pi * 2 self.globals["timedelta"] = timedelta self.globals["tuple"] = _to_tuple self.globals["typeof"] = typeof + self.globals["union"] = union self.globals["unpack"] = struct_unpack self.globals["urlencode"] = urlencode self.globals["version"] = version @@ -3049,11 +3105,13 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.filters["combine"] = combine self.filters["contains"] = contains self.filters["cos"] = cosine + self.filters["difference"] = difference self.filters["flatten"] = flatten self.filters["float"] = forgiving_float_filter self.filters["from_json"] = from_json self.filters["iif"] = iif self.filters["int"] = forgiving_int_filter + self.filters["intersect"] = intersect self.filters["is_defined"] = fail_when_undefined self.filters["is_number"] = is_number self.filters["log"] = logarithm @@ -3078,12 +3136,14 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.filters["slugify"] = slugify self.filters["sqrt"] = square_root self.filters["statistical_mode"] = statistical_mode + self.filters["symmetric_difference"] = symmetric_difference self.filters["tan"] = tangent self.filters["timestamp_custom"] = timestamp_custom self.filters["timestamp_local"] = timestamp_local self.filters["timestamp_utc"] = timestamp_utc self.filters["to_json"] = to_json self.filters["typeof"] = typeof + self.filters["union"] = union self.filters["unpack"] = struct_unpack self.filters["version"] = version diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index c327eb5cfa1..b75a859c5ea 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -10,7 +10,7 @@ aiohttp==3.11.14 aiohttp_cors==0.7.0 aiousbwatcher==1.1.1 aiozoneinfo==0.2.3 -annotatedyaml==0.4.4 +annotatedyaml==0.4.5 astral==2.2 async-interrupt==1.2.2 async-upnp-client==0.43.0 @@ -38,18 +38,18 @@ habluetooth==3.37.0 hass-nabucasa==0.94.0 hassil==2.2.3 home-assistant-bluetooth==1.13.1 -home-assistant-frontend==20250306.0 -home-assistant-intents==2025.3.23 +home-assistant-frontend==20250328.0 +home-assistant-intents==2025.3.28 httpx==0.28.1 ifaddr==0.2.0 Jinja2==3.1.6 lru-dict==1.3.0 mutagen==1.47.0 -orjson==3.10.15 +orjson==3.10.16 packaging>=23.1 paho-mqtt==2.1.0 Pillow==11.1.0 -propcache==0.3.0 +propcache==0.3.1 psutil-home-assistant==0.0.1 PyJWT==2.10.1 pymicro-vad==1.0.1 @@ 
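Not part of the diff: a short sketch of the set-style list helpers added to homeassistant/helpers/template.py above, which the same patch registers as both template globals and filters; floor_id()/area_id() above likewise gain an alias fallback after the exact-name lookup. The sample entity lists are made up, and result ordering is not guaranteed because the helpers round-trip through set().

from homeassistant.helpers.template import (
    difference,
    intersect,
    symmetric_difference,
    union,
)

low_battery = ["sensor.door", "sensor.window", "sensor.motion"]
recently_replaced = ["sensor.window", "sensor.garage"]

# Elements present in both lists.
assert set(intersect(low_battery, recently_replaced)) == {"sensor.window"}
# Elements of the first list that are not in the second.
assert set(difference(low_battery, recently_replaced)) == {
    "sensor.door",
    "sensor.motion",
}
# All unique elements from either list.
assert set(union(low_battery, recently_replaced)) == {
    "sensor.door",
    "sensor.garage",
    "sensor.motion",
    "sensor.window",
}
# Elements that appear in exactly one of the two lists.
assert set(symmetric_difference(low_battery, recently_replaced)) == {
    "sensor.door",
    "sensor.garage",
    "sensor.motion",
}
# In Jinja templates the same names work as filters,
# e.g. "{{ low_battery | intersect(recently_replaced) }}".
# Passing a plain string raises TypeError: strings are rejected even
# though they are iterable.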
-65,10 +65,10 @@ securetar==2025.2.1 SQLAlchemy==2.0.39 standard-aifc==3.13.0 standard-telnetlib==3.13.0 -typing-extensions>=4.12.2,<5.0 +typing-extensions>=4.13.0,<5.0 ulid-transform==1.4.0 urllib3>=1.26.5,<2 -uv==0.6.8 +uv==0.6.10 voluptuous-openapi==0.0.6 voluptuous-serialize==2.6.0 voluptuous==0.15.2 diff --git a/homeassistant/setup.py b/homeassistant/setup.py index 9572136559a..334e3a9e074 100644 --- a/homeassistant/setup.py +++ b/homeassistant/setup.py @@ -387,7 +387,7 @@ async def _async_setup_component( }, ) - _LOGGER.info("Setting up %s", domain) + _LOGGER.debug("Setting up %s", domain) with async_start_setup(hass, integration=domain, phase=SetupPhases.SETUP): if hasattr(component, "PLATFORM_SCHEMA"): @@ -783,7 +783,7 @@ def async_start_setup( # platforms, but we only care about the longest time. group_setup_times[phase] = max(group_setup_times[phase], time_taken) if group is None: - _LOGGER.info( + _LOGGER.debug( "Setup of domain %s took %.2f seconds", integration, time_taken ) elif _LOGGER.isEnabledFor(logging.DEBUG): diff --git a/homeassistant/strings.json b/homeassistant/strings.json index 29b7db7a011..13a6d1ef759 100644 --- a/homeassistant/strings.json +++ b/homeassistant/strings.json @@ -47,6 +47,7 @@ "access_token": "Access token", "api_key": "API key", "api_token": "API token", + "country": "Country", "device": "Device", "elevation": "Elevation", "email": "Email", @@ -119,6 +120,7 @@ "active": "Active", "charging": "Charging", "closed": "Closed", + "closing": "Closing", "connected": "Connected", "disabled": "Disabled", "discharging": "Discharging", @@ -132,8 +134,10 @@ "off": "Off", "on": "On", "open": "Open", + "opening": "Opening", "paused": "Paused", "standby": "Standby", + "stopped": "Stopped", "unlocked": "Unlocked", "yes": "Yes" }, diff --git a/homeassistant/util/unit_system.py b/homeassistant/util/unit_system.py index 15993cbae47..055f435503f 100644 --- a/homeassistant/util/unit_system.py +++ b/homeassistant/util/unit_system.py @@ -2,6 +2,7 @@ from __future__ import annotations +from dataclasses import dataclass from numbers import Number from typing import TYPE_CHECKING, Final @@ -82,9 +83,21 @@ def _is_valid_unit(unit: str, unit_type: str) -> bool: return False +@dataclass(frozen=True, kw_only=True) class UnitSystem: """A container for units of measure.""" + _name: str + accumulated_precipitation_unit: UnitOfPrecipitationDepth + area_unit: UnitOfArea + length_unit: UnitOfLength + mass_unit: UnitOfMass + pressure_unit: UnitOfPressure + temperature_unit: UnitOfTemperature + volume_unit: UnitOfVolume + wind_speed_unit: UnitOfSpeed + _conversions: dict[tuple[SensorDeviceClass | str | None, str | None], str] + def __init__( self, name: str, @@ -118,16 +131,16 @@ class UnitSystem: if errors: raise ValueError(errors) - self._name = name - self.accumulated_precipitation_unit = accumulated_precipitation - self.area_unit = area - self.length_unit = length - self.mass_unit = mass - self.pressure_unit = pressure - self.temperature_unit = temperature - self.volume_unit = volume - self.wind_speed_unit = wind_speed - self._conversions = conversions + super().__setattr__("_name", name) + super().__setattr__("accumulated_precipitation_unit", accumulated_precipitation) + super().__setattr__("area_unit", area) + super().__setattr__("length_unit", length) + super().__setattr__("mass_unit", mass) + super().__setattr__("pressure_unit", pressure) + super().__setattr__("temperature_unit", temperature) + super().__setattr__("volume_unit", volume) + super().__setattr__("wind_speed_unit", 
wind_speed) + super().__setattr__("_conversions", conversions) def temperature(self, temperature: float, from_unit: str) -> float: """Convert the given temperature to this unit system.""" diff --git a/mypy.ini b/mypy.ini index 852678677bb..9831a183ec4 100644 --- a/mypy.ini +++ b/mypy.ini @@ -945,6 +945,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.bosch_alarm.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.braviatv.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/pyproject.toml b/pyproject.toml index 1bd74791a18..a542ac26f20 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,98 +1,98 @@ [build-system] -requires = ["setuptools==77.0.1"] +requires = ["setuptools==77.0.3"] build-backend = "setuptools.build_meta" [project] -name = "homeassistant" -version = "2025.4.0.dev0" -license = "Apache-2.0" +name = "homeassistant" +version = "2025.5.0.dev0" +license = "Apache-2.0" license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"] description = "Open-source home automation platform running on Python 3." -readme = "README.rst" -authors = [ - {name = "The Home Assistant Authors", email = "hello@home-assistant.io"} +readme = "README.rst" +authors = [ + { name = "The Home Assistant Authors", email = "hello@home-assistant.io" }, ] -keywords = ["home", "automation"] +keywords = ["home", "automation"] classifiers = [ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: End Users/Desktop", - "Intended Audience :: Developers", - "Operating System :: OS Independent", - "Programming Language :: Python :: 3.13", - "Topic :: Home Automation", + "Development Status :: 5 - Production/Stable", + "Intended Audience :: End Users/Desktop", + "Intended Audience :: Developers", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3.13", + "Topic :: Home Automation", ] requires-python = ">=3.13.0" -dependencies = [ - "aiodns==3.2.0", - # Integrations may depend on hassio integration without listing it to - # change behavior based on presence of supervisor. Deprecated with #127228 - # Lib can be removed with 2025.11 - "aiohasupervisor==0.3.0", - "aiohttp==3.11.14", - "aiohttp_cors==0.7.0", - "aiohttp-fast-zlib==0.2.3", - "aiohttp-asyncmdnsresolver==0.1.1", - "aiozoneinfo==0.2.3", - "annotatedyaml==0.4.4", - "astral==2.2", - "async-interrupt==1.2.2", - "attrs==25.1.0", - "atomicwrites-homeassistant==1.4.1", - "audioop-lts==0.2.1", - "awesomeversion==24.6.0", - "bcrypt==4.2.0", - "certifi>=2021.5.30", - "ciso8601==2.3.2", - "cronsim==2.6", - "fnv-hash-fast==1.4.0", - # hass-nabucasa is imported by helpers which don't depend on the cloud - # integration - "hass-nabucasa==0.94.0", - # When bumping httpx, please check the version pins of - # httpcore, anyio, and h11 in gen_requirements_all - "httpx==0.28.1", - "home-assistant-bluetooth==1.13.1", - "ifaddr==0.2.0", - "Jinja2==3.1.6", - "lru-dict==1.3.0", - "PyJWT==2.10.1", - # PyJWT has loose dependency. We want the latest one. 
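Not part of the diff: a standalone sketch, with illustrative names rather than Home Assistant code, of the pattern the UnitSystem hunk above uses, i.e. a frozen keyword-only dataclass that keeps a hand-written __init__ and writes its fields through super().__setattr__() so instances stay immutable afterwards.

from dataclasses import FrozenInstanceError, dataclass


@dataclass(frozen=True, kw_only=True)
class Units:
    """Tiny stand-in for a frozen container with a custom __init__."""

    length_unit: str
    mass_unit: str

    def __init__(self, *, length: str, mass: str) -> None:
        # dataclass() skips generating __init__ when the class defines one,
        # so validation can run before any field is populated.
        if not length or not mass:
            raise ValueError("length and mass units are required")
        # frozen=True makes normal attribute assignment raise, so fields are
        # written through the base class, as in UnitSystem.__init__ above.
        super().__setattr__("length_unit", length)
        super().__setattr__("mass_unit", mass)


units = Units(length="km", mass="g")
assert units.length_unit == "km"
try:
    units.mass_unit = "kg"  # rejected: the instance is frozen
except FrozenInstanceError:
    pass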
- "cryptography==44.0.1", - "Pillow==11.1.0", - "propcache==0.3.0", - "pyOpenSSL==25.0.0", - "orjson==3.10.15", - "packaging>=23.1", - "psutil-home-assistant==0.0.1", - "python-slugify==8.0.4", - "PyYAML==6.0.2", - "requests==2.32.3", - "securetar==2025.2.1", - "SQLAlchemy==2.0.39", - "standard-aifc==3.13.0", - "standard-telnetlib==3.13.0", - "typing-extensions>=4.12.2,<5.0", - "ulid-transform==1.4.0", - # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 - # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 - # https://github.com/home-assistant/core/issues/97248 - "urllib3>=1.26.5,<2", - "uv==0.6.8", - "voluptuous==0.15.2", - "voluptuous-serialize==2.6.0", - "voluptuous-openapi==0.0.6", - "yarl==1.18.3", - "webrtc-models==0.3.0", - "zeroconf==0.146.0" +dependencies = [ + "aiodns==3.2.0", + # Integrations may depend on hassio integration without listing it to + # change behavior based on presence of supervisor. Deprecated with #127228 + # Lib can be removed with 2025.11 + "aiohasupervisor==0.3.0", + "aiohttp==3.11.14", + "aiohttp_cors==0.7.0", + "aiohttp-fast-zlib==0.2.3", + "aiohttp-asyncmdnsresolver==0.1.1", + "aiozoneinfo==0.2.3", + "annotatedyaml==0.4.5", + "astral==2.2", + "async-interrupt==1.2.2", + "attrs==25.1.0", + "atomicwrites-homeassistant==1.4.1", + "audioop-lts==0.2.1", + "awesomeversion==24.6.0", + "bcrypt==4.2.0", + "certifi>=2021.5.30", + "ciso8601==2.3.2", + "cronsim==2.6", + "fnv-hash-fast==1.4.0", + # hass-nabucasa is imported by helpers which don't depend on the cloud + # integration + "hass-nabucasa==0.94.0", + # When bumping httpx, please check the version pins of + # httpcore, anyio, and h11 in gen_requirements_all + "httpx==0.28.1", + "home-assistant-bluetooth==1.13.1", + "ifaddr==0.2.0", + "Jinja2==3.1.6", + "lru-dict==1.3.0", + "PyJWT==2.10.1", + # PyJWT has loose dependency. We want the latest one. 
+ "cryptography==44.0.1", + "Pillow==11.1.0", + "propcache==0.3.1", + "pyOpenSSL==25.0.0", + "orjson==3.10.16", + "packaging>=23.1", + "psutil-home-assistant==0.0.1", + "python-slugify==8.0.4", + "PyYAML==6.0.2", + "requests==2.32.3", + "securetar==2025.2.1", + "SQLAlchemy==2.0.39", + "standard-aifc==3.13.0", + "standard-telnetlib==3.13.0", + "typing-extensions>=4.13.0,<5.0", + "ulid-transform==1.4.0", + # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 + # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 + # https://github.com/home-assistant/core/issues/97248 + "urllib3>=1.26.5,<2", + "uv==0.6.10", + "voluptuous==0.15.2", + "voluptuous-serialize==2.6.0", + "voluptuous-openapi==0.0.6", + "yarl==1.18.3", + "webrtc-models==0.3.0", + "zeroconf==0.146.0", ] [project.urls] -"Homepage" = "https://www.home-assistant.io/" +"Homepage" = "https://www.home-assistant.io/" "Source Code" = "https://github.com/home-assistant/core" "Bug Reports" = "https://github.com/home-assistant/core/issues" -"Docs: Dev" = "https://developers.home-assistant.io/" -"Discord" = "https://www.home-assistant.io/join-chat/" -"Forum" = "https://community.home-assistant.io/" +"Docs: Dev" = "https://developers.home-assistant.io/" +"Discord" = "https://www.home-assistant.io/join-chat/" +"Forum" = "https://community.home-assistant.io/" [project.scripts] hass = "homeassistant.__main__:main" @@ -119,30 +119,28 @@ init-hook = """\ ) \ """ load-plugins = [ - "pylint.extensions.code_style", - "pylint.extensions.typing", - "hass_decorator", - "hass_enforce_class_module", - "hass_enforce_sorted_platforms", - "hass_enforce_super_call", - "hass_enforce_type_hints", - "hass_inheritance", - "hass_imports", - "hass_logger", - "pylint_per_file_ignores", + "pylint.extensions.code_style", + "pylint.extensions.typing", + "hass_decorator", + "hass_enforce_class_module", + "hass_enforce_sorted_platforms", + "hass_enforce_super_call", + "hass_enforce_type_hints", + "hass_inheritance", + "hass_imports", + "hass_logger", + "pylint_per_file_ignores", ] persistent = false extension-pkg-allow-list = [ - "av.audio.stream", - "av.logging", - "av.stream", - "ciso8601", - "orjson", - "cv2", -] -fail-on = [ - "I", + "av.audio.stream", + "av.logging", + "av.stream", + "ciso8601", + "orjson", + "cv2", ] +fail-on = ["I"] [tool.pylint.BASIC] class-const-naming-style = "any" @@ -167,257 +165,257 @@ class-const-naming-style = "any" # consider-using-namedtuple-or-dataclass - too opinionated # consider-using-assignment-expr - decision to use := better left to devs disable = [ - "format", - "abstract-method", - "cyclic-import", - "duplicate-code", - "inconsistent-return-statements", - "locally-disabled", - "not-context-manager", - "too-few-public-methods", - "too-many-ancestors", - "too-many-arguments", - "too-many-instance-attributes", - "too-many-lines", - "too-many-locals", - "too-many-public-methods", - "too-many-boolean-expressions", - "too-many-positional-arguments", - "wrong-import-order", - "consider-using-namedtuple-or-dataclass", - "consider-using-assignment-expr", - "possibly-used-before-assignment", + "format", + "abstract-method", + "cyclic-import", + "duplicate-code", + "inconsistent-return-statements", + "locally-disabled", + "not-context-manager", + "too-few-public-methods", + "too-many-ancestors", + "too-many-arguments", + "too-many-instance-attributes", + "too-many-lines", + "too-many-locals", + "too-many-public-methods", + "too-many-boolean-expressions", + "too-many-positional-arguments", + 
"wrong-import-order", + "consider-using-namedtuple-or-dataclass", + "consider-using-assignment-expr", + "possibly-used-before-assignment", - # Handled by ruff - # Ref: - "await-outside-async", # PLE1142 - "bad-str-strip-call", # PLE1310 - "bad-string-format-type", # PLE1307 - "bidirectional-unicode", # PLE2502 - "continue-in-finally", # PLE0116 - "duplicate-bases", # PLE0241 - "misplaced-bare-raise", # PLE0704 - "format-needs-mapping", # F502 - "function-redefined", # F811 - # Needed because ruff does not understand type of __all__ generated by a function - # "invalid-all-format", # PLE0605 - "invalid-all-object", # PLE0604 - "invalid-character-backspace", # PLE2510 - "invalid-character-esc", # PLE2513 - "invalid-character-nul", # PLE2514 - "invalid-character-sub", # PLE2512 - "invalid-character-zero-width-space", # PLE2515 - "logging-too-few-args", # PLE1206 - "logging-too-many-args", # PLE1205 - "missing-format-string-key", # F524 - "mixed-format-string", # F506 - "no-method-argument", # N805 - "no-self-argument", # N805 - "nonexistent-operator", # B002 - "nonlocal-without-binding", # PLE0117 - "not-in-loop", # F701, F702 - "notimplemented-raised", # F901 - "return-in-init", # PLE0101 - "return-outside-function", # F706 - "syntax-error", # E999 - "too-few-format-args", # F524 - "too-many-format-args", # F522 - "too-many-star-expressions", # F622 - "truncated-format-string", # F501 - "undefined-all-variable", # F822 - "undefined-variable", # F821 - "used-prior-global-declaration", # PLE0118 - "yield-inside-async-function", # PLE1700 - "yield-outside-function", # F704 - "anomalous-backslash-in-string", # W605 - "assert-on-string-literal", # PLW0129 - "assert-on-tuple", # F631 - "bad-format-string", # W1302, F - "bad-format-string-key", # W1300, F - "bare-except", # E722 - "binary-op-exception", # PLW0711 - "cell-var-from-loop", # B023 - # "dangerous-default-value", # B006, ruff catches new occurrences, needs more work - "duplicate-except", # B014 - "duplicate-key", # F601 - "duplicate-string-formatting-argument", # F - "duplicate-value", # F - "eval-used", # S307 - "exec-used", # S102 - "expression-not-assigned", # B018 - "f-string-without-interpolation", # F541 - "forgotten-debug-statement", # T100 - "format-string-without-interpolation", # F - # "global-statement", # PLW0603, ruff catches new occurrences, needs more work - "global-variable-not-assigned", # PLW0602 - "implicit-str-concat", # ISC001 - "import-self", # PLW0406 - "inconsistent-quotes", # Q000 - "invalid-envvar-default", # PLW1508 - "keyword-arg-before-vararg", # B026 - "logging-format-interpolation", # G - "logging-fstring-interpolation", # G - "logging-not-lazy", # G - "misplaced-future", # F404 - "named-expr-without-context", # PLW0131 - "nested-min-max", # PLW3301 - "pointless-statement", # B018 - "raise-missing-from", # B904 - "redefined-builtin", # A001 - "try-except-raise", # TRY302 - "unused-argument", # ARG001, we don't use it - "unused-format-string-argument", #F507 - "unused-format-string-key", # F504 - "unused-import", # F401 - "unused-variable", # F841 - "useless-else-on-loop", # PLW0120 - "wildcard-import", # F403 - "bad-classmethod-argument", # N804 - "consider-iterating-dictionary", # SIM118 - "empty-docstring", # D419 - "invalid-name", # N815 - "line-too-long", # E501, disabled globally - "missing-class-docstring", # D101 - "missing-final-newline", # W292 - "missing-function-docstring", # D103 - "missing-module-docstring", # D100 - "multiple-imports", #E401 - "singleton-comparison", # E711, E712 - 
"subprocess-run-check", # PLW1510 - "superfluous-parens", # UP034 - "ungrouped-imports", # I001 - "unidiomatic-typecheck", # E721 - "unnecessary-direct-lambda-call", # PLC3002 - "unnecessary-lambda-assignment", # PLC3001 - "unnecessary-pass", # PIE790 - "unneeded-not", # SIM208 - "useless-import-alias", # PLC0414 - "wrong-import-order", # I001 - "wrong-import-position", # E402 - "comparison-of-constants", # PLR0133 - "comparison-with-itself", # PLR0124 - "consider-alternative-union-syntax", # UP007 - "consider-merging-isinstance", # PLR1701 - "consider-using-alias", # UP006 - "consider-using-dict-comprehension", # C402 - "consider-using-generator", # C417 - "consider-using-get", # SIM401 - "consider-using-set-comprehension", # C401 - "consider-using-sys-exit", # PLR1722 - "consider-using-ternary", # SIM108 - "literal-comparison", # F632 - "property-with-parameters", # PLR0206 - "super-with-arguments", # UP008 - "too-many-branches", # PLR0912 - "too-many-return-statements", # PLR0911 - "too-many-statements", # PLR0915 - "trailing-comma-tuple", # COM818 - "unnecessary-comprehension", # C416 - "use-a-generator", # C417 - "use-dict-literal", # C406 - "use-list-literal", # C405 - "useless-object-inheritance", # UP004 - "useless-return", # PLR1711 - "no-else-break", # RET508 - "no-else-continue", # RET507 - "no-else-raise", # RET506 - "no-else-return", # RET505 - "broad-except", # BLE001 - "protected-access", # SLF001 - "broad-exception-raised", # TRY002 - "consider-using-f-string", # PLC0209 - # "no-self-use", # PLR6301 # Optional plugin, not enabled + # Handled by ruff + # Ref: + "await-outside-async", # PLE1142 + "bad-str-strip-call", # PLE1310 + "bad-string-format-type", # PLE1307 + "bidirectional-unicode", # PLE2502 + "continue-in-finally", # PLE0116 + "duplicate-bases", # PLE0241 + "misplaced-bare-raise", # PLE0704 + "format-needs-mapping", # F502 + "function-redefined", # F811 + # Needed because ruff does not understand type of __all__ generated by a function + # "invalid-all-format", # PLE0605 + "invalid-all-object", # PLE0604 + "invalid-character-backspace", # PLE2510 + "invalid-character-esc", # PLE2513 + "invalid-character-nul", # PLE2514 + "invalid-character-sub", # PLE2512 + "invalid-character-zero-width-space", # PLE2515 + "logging-too-few-args", # PLE1206 + "logging-too-many-args", # PLE1205 + "missing-format-string-key", # F524 + "mixed-format-string", # F506 + "no-method-argument", # N805 + "no-self-argument", # N805 + "nonexistent-operator", # B002 + "nonlocal-without-binding", # PLE0117 + "not-in-loop", # F701, F702 + "notimplemented-raised", # F901 + "return-in-init", # PLE0101 + "return-outside-function", # F706 + "syntax-error", # E999 + "too-few-format-args", # F524 + "too-many-format-args", # F522 + "too-many-star-expressions", # F622 + "truncated-format-string", # F501 + "undefined-all-variable", # F822 + "undefined-variable", # F821 + "used-prior-global-declaration", # PLE0118 + "yield-inside-async-function", # PLE1700 + "yield-outside-function", # F704 + "anomalous-backslash-in-string", # W605 + "assert-on-string-literal", # PLW0129 + "assert-on-tuple", # F631 + "bad-format-string", # W1302, F + "bad-format-string-key", # W1300, F + "bare-except", # E722 + "binary-op-exception", # PLW0711 + "cell-var-from-loop", # B023 + # "dangerous-default-value", # B006, ruff catches new occurrences, needs more work + "duplicate-except", # B014 + "duplicate-key", # F601 + "duplicate-string-formatting-argument", # F + "duplicate-value", # F + "eval-used", # S307 + "exec-used", # S102 
+ "expression-not-assigned", # B018 + "f-string-without-interpolation", # F541 + "forgotten-debug-statement", # T100 + "format-string-without-interpolation", # F + # "global-statement", # PLW0603, ruff catches new occurrences, needs more work + "global-variable-not-assigned", # PLW0602 + "implicit-str-concat", # ISC001 + "import-self", # PLW0406 + "inconsistent-quotes", # Q000 + "invalid-envvar-default", # PLW1508 + "keyword-arg-before-vararg", # B026 + "logging-format-interpolation", # G + "logging-fstring-interpolation", # G + "logging-not-lazy", # G + "misplaced-future", # F404 + "named-expr-without-context", # PLW0131 + "nested-min-max", # PLW3301 + "pointless-statement", # B018 + "raise-missing-from", # B904 + "redefined-builtin", # A001 + "try-except-raise", # TRY302 + "unused-argument", # ARG001, we don't use it + "unused-format-string-argument", #F507 + "unused-format-string-key", # F504 + "unused-import", # F401 + "unused-variable", # F841 + "useless-else-on-loop", # PLW0120 + "wildcard-import", # F403 + "bad-classmethod-argument", # N804 + "consider-iterating-dictionary", # SIM118 + "empty-docstring", # D419 + "invalid-name", # N815 + "line-too-long", # E501, disabled globally + "missing-class-docstring", # D101 + "missing-final-newline", # W292 + "missing-function-docstring", # D103 + "missing-module-docstring", # D100 + "multiple-imports", #E401 + "singleton-comparison", # E711, E712 + "subprocess-run-check", # PLW1510 + "superfluous-parens", # UP034 + "ungrouped-imports", # I001 + "unidiomatic-typecheck", # E721 + "unnecessary-direct-lambda-call", # PLC3002 + "unnecessary-lambda-assignment", # PLC3001 + "unnecessary-pass", # PIE790 + "unneeded-not", # SIM208 + "useless-import-alias", # PLC0414 + "wrong-import-order", # I001 + "wrong-import-position", # E402 + "comparison-of-constants", # PLR0133 + "comparison-with-itself", # PLR0124 + "consider-alternative-union-syntax", # UP007 + "consider-merging-isinstance", # PLR1701 + "consider-using-alias", # UP006 + "consider-using-dict-comprehension", # C402 + "consider-using-generator", # C417 + "consider-using-get", # SIM401 + "consider-using-set-comprehension", # C401 + "consider-using-sys-exit", # PLR1722 + "consider-using-ternary", # SIM108 + "literal-comparison", # F632 + "property-with-parameters", # PLR0206 + "super-with-arguments", # UP008 + "too-many-branches", # PLR0912 + "too-many-return-statements", # PLR0911 + "too-many-statements", # PLR0915 + "trailing-comma-tuple", # COM818 + "unnecessary-comprehension", # C416 + "use-a-generator", # C417 + "use-dict-literal", # C406 + "use-list-literal", # C405 + "useless-object-inheritance", # UP004 + "useless-return", # PLR1711 + "no-else-break", # RET508 + "no-else-continue", # RET507 + "no-else-raise", # RET506 + "no-else-return", # RET505 + "broad-except", # BLE001 + "protected-access", # SLF001 + "broad-exception-raised", # TRY002 + "consider-using-f-string", # PLC0209 + # "no-self-use", # PLR6301 # Optional plugin, not enabled - # Handled by mypy - # Ref: - "abstract-class-instantiated", - "arguments-differ", - "assigning-non-slot", - "assignment-from-no-return", - "assignment-from-none", - "bad-exception-cause", - "bad-format-character", - "bad-reversed-sequence", - "bad-super-call", - "bad-thread-instantiation", - "catching-non-exception", - "comparison-with-callable", - "deprecated-class", - "dict-iter-missing-items", - "format-combined-specification", - "global-variable-undefined", - "import-error", - "inconsistent-mro", - "inherit-non-class", - "init-is-generator", - 
"invalid-class-object", - "invalid-enum-extension", - "invalid-envvar-value", - "invalid-format-returned", - "invalid-hash-returned", - "invalid-metaclass", - "invalid-overridden-method", - "invalid-repr-returned", - "invalid-sequence-index", - "invalid-slice-index", - "invalid-slots-object", - "invalid-slots", - "invalid-star-assignment-target", - "invalid-str-returned", - "invalid-unary-operand-type", - "invalid-unicode-codec", - "isinstance-second-argument-not-valid-type", - "method-hidden", - "misplaced-format-function", - "missing-format-argument-key", - "missing-format-attribute", - "missing-kwoa", - "no-member", - "no-value-for-parameter", - "non-iterator-returned", - "non-str-assignment-to-dunder-name", - "nonlocal-and-global", - "not-a-mapping", - "not-an-iterable", - "not-async-context-manager", - "not-callable", - "not-context-manager", - "overridden-final-method", - "raising-bad-type", - "raising-non-exception", - "redundant-keyword-arg", - "relative-beyond-top-level", - "self-cls-assignment", - "signature-differs", - "star-needs-assignment-target", - "subclassed-final-class", - "super-without-brackets", - "too-many-function-args", - "typevar-double-variance", - "typevar-name-mismatch", - "unbalanced-dict-unpacking", - "unbalanced-tuple-unpacking", - "unexpected-keyword-arg", - "unhashable-member", - "unpacking-non-sequence", - "unsubscriptable-object", - "unsupported-assignment-operation", - "unsupported-binary-operation", - "unsupported-delete-operation", - "unsupported-membership-test", - "used-before-assignment", - "using-final-decorator-in-unsupported-version", - "wrong-exception-operation", + # Handled by mypy + # Ref: + "abstract-class-instantiated", + "arguments-differ", + "assigning-non-slot", + "assignment-from-no-return", + "assignment-from-none", + "bad-exception-cause", + "bad-format-character", + "bad-reversed-sequence", + "bad-super-call", + "bad-thread-instantiation", + "catching-non-exception", + "comparison-with-callable", + "deprecated-class", + "dict-iter-missing-items", + "format-combined-specification", + "global-variable-undefined", + "import-error", + "inconsistent-mro", + "inherit-non-class", + "init-is-generator", + "invalid-class-object", + "invalid-enum-extension", + "invalid-envvar-value", + "invalid-format-returned", + "invalid-hash-returned", + "invalid-metaclass", + "invalid-overridden-method", + "invalid-repr-returned", + "invalid-sequence-index", + "invalid-slice-index", + "invalid-slots-object", + "invalid-slots", + "invalid-star-assignment-target", + "invalid-str-returned", + "invalid-unary-operand-type", + "invalid-unicode-codec", + "isinstance-second-argument-not-valid-type", + "method-hidden", + "misplaced-format-function", + "missing-format-argument-key", + "missing-format-attribute", + "missing-kwoa", + "no-member", + "no-value-for-parameter", + "non-iterator-returned", + "non-str-assignment-to-dunder-name", + "nonlocal-and-global", + "not-a-mapping", + "not-an-iterable", + "not-async-context-manager", + "not-callable", + "not-context-manager", + "overridden-final-method", + "raising-bad-type", + "raising-non-exception", + "redundant-keyword-arg", + "relative-beyond-top-level", + "self-cls-assignment", + "signature-differs", + "star-needs-assignment-target", + "subclassed-final-class", + "super-without-brackets", + "too-many-function-args", + "typevar-double-variance", + "typevar-name-mismatch", + "unbalanced-dict-unpacking", + "unbalanced-tuple-unpacking", + "unexpected-keyword-arg", + "unhashable-member", + "unpacking-non-sequence", + 
"unsubscriptable-object", + "unsupported-assignment-operation", + "unsupported-binary-operation", + "unsupported-delete-operation", + "unsupported-membership-test", + "used-before-assignment", + "using-final-decorator-in-unsupported-version", + "wrong-exception-operation", ] enable = [ - #"useless-suppression", # temporarily every now and then to clean them up - "use-symbolic-message-instead", + #"useless-suppression", # temporarily every now and then to clean them up + "use-symbolic-message-instead", ] per-file-ignores = [ - # redefined-outer-name: Tests reference fixtures in the test function - # use-implicit-booleaness-not-comparison: Tests need to validate that a list - # or a dict is returned - "/tests/:redefined-outer-name,use-implicit-booleaness-not-comparison", + # redefined-outer-name: Tests reference fixtures in the test function + # use-implicit-booleaness-not-comparison: Tests need to validate that a list + # or a dict is returned + "/tests/:redefined-outer-name,use-implicit-booleaness-not-comparison", ] [tool.pylint.REPORTS] @@ -425,7 +423,7 @@ score = false [tool.pylint.TYPECHECK] ignored-classes = [ - "_CountingAttr", # for attrs + "_CountingAttr", # for attrs ] mixin-class-rgx = ".*[Mm]ix[Ii]n" @@ -434,9 +432,9 @@ expected-line-ending-format = "LF" [tool.pylint.EXCEPTIONS] overgeneral-exceptions = [ - "builtins.BaseException", - "builtins.Exception", - # "homeassistant.exceptions.HomeAssistantError", # too many issues + "builtins.BaseException", + "builtins.Exception", + # "homeassistant.exceptions.HomeAssistantError", # too many issues ] [tool.pylint.TYPING] @@ -446,241 +444,234 @@ runtime-typing = false max-line-length-suggestions = 72 [tool.pytest.ini_options] -testpaths = [ - "tests", -] -norecursedirs = [ - ".git", - "testing_config", -] +testpaths = ["tests"] +norecursedirs = [".git", "testing_config"] log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s" log_date_format = "%Y-%m-%d %H:%M:%S" asyncio_mode = "auto" asyncio_default_fixture_loop_scope = "function" filterwarnings = [ - "error::sqlalchemy.exc.SAWarning", + "error::sqlalchemy.exc.SAWarning", - # -- HomeAssistant - aiohttp - # Overwrite web.Application to pass a custom default argument to _make_request - "ignore:Inheritance class HomeAssistantApplication from web.Application is discouraged:DeprecationWarning", - # Hass wraps `ClientSession.close` to emit a warning if the session is closed accidentally - "ignore:Setting custom ClientSession.close attribute is discouraged:DeprecationWarning:homeassistant.helpers.aiohttp_client", - # Modify app state for testing - "ignore:Changing state of started or joined application is deprecated:DeprecationWarning:tests.components.http.test_ban", + # -- HomeAssistant - aiohttp + # Overwrite web.Application to pass a custom default argument to _make_request + "ignore:Inheritance class HomeAssistantApplication from web.Application is discouraged:DeprecationWarning", + # Hass wraps `ClientSession.close` to emit a warning if the session is closed accidentally + "ignore:Setting custom ClientSession.close attribute is discouraged:DeprecationWarning:homeassistant.helpers.aiohttp_client", + # Modify app state for testing + "ignore:Changing state of started or joined application is deprecated:DeprecationWarning:tests.components.http.test_ban", # -- Tests # Ignore custom pytest marks "ignore:Unknown pytest.mark.disable_autouse_fixture:pytest.PytestUnknownMarkWarning:tests.components.met", "ignore:Unknown 
pytest.mark.dataset:pytest.PytestUnknownMarkWarning:tests.components.screenlogic", - # https://github.com/rokam/sunweg/blob/3.1.0/sunweg/plant.py#L96 - v3.1.0 - 2024-10-02 - "ignore:The '(kwh_per_kwp|performance_rate)' property is deprecated and will return 0:DeprecationWarning:tests.components.sunweg.test_init", - # -- design choice 3rd party - # https://github.com/gwww/elkm1/blob/2.2.10/elkm1_lib/util.py#L8-L19 - "ignore:ssl.TLSVersion.TLSv1 is deprecated:DeprecationWarning:elkm1_lib.util", - # https://github.com/allenporter/ical/pull/215 - # https://github.com/allenporter/ical/blob/8.2.0/ical/util.py#L21-L23 - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:ical.util", - # https://github.com/bachya/regenmaschine/blob/2024.03.0/regenmaschine/client.py#L52 - "ignore:ssl.TLSVersion.SSLv3 is deprecated:DeprecationWarning:regenmaschine.client", + # -- design choice 3rd party + # https://github.com/gwww/elkm1/blob/2.2.10/elkm1_lib/util.py#L8-L19 + "ignore:ssl.TLSVersion.TLSv1 is deprecated:DeprecationWarning:elkm1_lib.util", + # https://github.com/allenporter/ical/pull/215 + # https://github.com/allenporter/ical/blob/8.2.0/ical/util.py#L21-L23 + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:ical.util", + # https://github.com/bachya/regenmaschine/blob/2024.03.0/regenmaschine/client.py#L52 + "ignore:ssl.TLSVersion.SSLv3 is deprecated:DeprecationWarning:regenmaschine.client", - # -- Setuptools DeprecationWarnings - # https://github.com/googleapis/google-cloud-python/issues/11184 - # https://github.com/zopefoundation/meta/issues/194 - # https://github.com/Azure/azure-sdk-for-python - "ignore:Deprecated call to `pkg_resources.declare_namespace\\(('azure'|'google.*'|'pywinusb'|'repoze'|'xbox'|'zope')\\)`:DeprecationWarning:pkg_resources", + # -- Setuptools DeprecationWarnings + # https://github.com/googleapis/google-cloud-python/issues/11184 + # https://github.com/zopefoundation/meta/issues/194 + # https://github.com/Azure/azure-sdk-for-python + "ignore:Deprecated call to `pkg_resources.declare_namespace\\(('azure'|'google.*'|'pywinusb'|'repoze'|'xbox'|'zope')\\)`:DeprecationWarning:pkg_resources", - # -- tracked upstream / open PRs - # - pyOpenSSL v24.2.1 - # https://github.com/certbot/certbot/issues/9828 - v2.11.0 - # https://github.com/certbot/certbot/issues/9992 - "ignore:X509Extension support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:josepy.util", - # - other - # https://github.com/foxel/python_ndms2_client/issues/6 - v0.1.3 - # https://github.com/foxel/python_ndms2_client/pull/8 - "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:ndms2_client.connection", + # -- tracked upstream / open PRs + # - pyOpenSSL v24.2.1 + # https://github.com/certbot/certbot/issues/9828 - v2.11.0 + # https://github.com/certbot/certbot/issues/9992 + "ignore:X509Extension support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", + "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", + "ignore:CSR support in pyOpenSSL is deprecated. 
You should use the APIs in cryptography:DeprecationWarning:josepy.util", + # - other + # https://github.com/foxel/python_ndms2_client/issues/6 - v0.1.3 + # https://github.com/foxel/python_ndms2_client/pull/8 + "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:ndms2_client.connection", - # -- fixed, waiting for release / update - # https://github.com/bachya/aiopurpleair/pull/200 - >=2023.10.0 - "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:aiopurpleair.helpers.validators", - # https://bugs.launchpad.net/beautifulsoup/+bug/2076897 - >4.12.3 - "ignore:The 'strip_cdata' option of HTMLParser\\(\\) has never done anything and will eventually be removed:DeprecationWarning:bs4.builder._lxml", - # https://github.com/DataDog/datadogpy/pull/290 - >=0.23.0 - "ignore:invalid escape sequence:SyntaxWarning:.*datadog.dogstatsd.base", - # https://github.com/DataDog/datadogpy/pull/566/files - >=0.37.0 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:datadog.util.compat", - # https://github.com/fwestenberg/devialet/pull/6 - >1.4.5 - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:devialet.devialet_api", - # https://github.com/httplib2/httplib2/pull/226 - >=0.21.0 - "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:httplib2", - # https://github.com/influxdata/influxdb-client-python/issues/603 >=1.45.0 - # https://github.com/influxdata/influxdb-client-python/pull/652 - "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb_client.client.write.point", - # https://github.com/majuss/lupupy/pull/15 - >0.3.2 - "ignore:\"is not\" with 'str' literal. Did you mean \"!=\"?:SyntaxWarning:.*lupupy.devices.alarm", - # https://github.com/nextcord/nextcord/pull/1095 - >2.6.1 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:nextcord.health_check", - # https://github.com/eclipse/paho.mqtt.python/issues/653 - >=2.0.0 - # https://github.com/eclipse/paho.mqtt.python/pull/665 - "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:paho.mqtt.client", - # https://github.com/vacanza/python-holidays/discussions/1800 - >1.0.0 - "ignore::DeprecationWarning:holidays", - # https://github.com/rytilahti/python-miio/pull/1809 - >=0.6.0.dev0 - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.protocol", - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.miioprotocol", - # https://github.com/okunishinishi/python-stringcase/commit/6a5c5bbd3fe5337862abc7fd0853a0f36e18b2e1 - >1.2.0 - "ignore:invalid escape sequence:SyntaxWarning:.*stringcase", + # -- fixed, waiting for release / update + # https://github.com/bachya/aiopurpleair/pull/200 - >=2023.10.0 + "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:aiopurpleair.helpers.validators", + # https://bugs.launchpad.net/beautifulsoup/+bug/2076897 - >4.12.3 + "ignore:The 'strip_cdata' option of HTMLParser\\(\\) has never done anything and will eventually be removed:DeprecationWarning:bs4.builder._lxml", + # https://github.com/DataDog/datadogpy/pull/290 - >=0.23.0 + "ignore:invalid escape sequence:SyntaxWarning:.*datadog.dogstatsd.base", + # https://github.com/DataDog/datadogpy/pull/566/files - >=0.37.0 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:datadog.util.compat", + # 
https://github.com/fwestenberg/devialet/pull/6 - >1.4.5 + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:devialet.devialet_api", + # https://github.com/httplib2/httplib2/pull/226 - >=0.21.0 + "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:httplib2", + # https://github.com/influxdata/influxdb-client-python/issues/603 >=1.45.0 + # https://github.com/influxdata/influxdb-client-python/pull/652 + "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb_client.client.write.point", + # https://github.com/majuss/lupupy/pull/15 - >0.3.2 + "ignore:\"is not\" with 'str' literal. Did you mean \"!=\"?:SyntaxWarning:.*lupupy.devices.alarm", + # https://github.com/nextcord/nextcord/pull/1095 - >2.6.1 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:nextcord.health_check", + # https://github.com/eclipse/paho.mqtt.python/issues/653 - >=2.0.0 + # https://github.com/eclipse/paho.mqtt.python/pull/665 + "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:paho.mqtt.client", + # https://github.com/vacanza/python-holidays/discussions/1800 - >1.0.0 + "ignore::DeprecationWarning:holidays", + # https://github.com/rytilahti/python-miio/pull/1809 - >=0.6.0.dev0 + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.protocol", + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.miioprotocol", + # https://github.com/okunishinishi/python-stringcase/commit/6a5c5bbd3fe5337862abc7fd0853a0f36e18b2e1 - >1.2.0 + "ignore:invalid escape sequence:SyntaxWarning:.*stringcase", - # -- fixed for Python 3.13 - # https://github.com/rhasspy/wyoming/commit/e34af30d455b6f2bb9e5cfb25fad8d276914bc54 - >=1.4.2 - "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:wyoming.audio", + # -- fixed for Python 3.13 + # https://github.com/rhasspy/wyoming/commit/e34af30d455b6f2bb9e5cfb25fad8d276914bc54 - >=1.4.2 + "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:wyoming.audio", - # -- other - # Locale changes might take some time to resolve upstream - # https://github.com/Squachen/micloud/blob/v_0.6/micloud/micloud.py#L35 - v0.6 - 2022-12-08 - "ignore:'locale.getdefaultlocale' is deprecated and slated for removal in Python 3.15:DeprecationWarning:micloud.micloud", - # https://github.com/MatsNl/pyatag/issues/11 - v0.3.7.1 - 2023-10-09 - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pyatag.gateway", - # https://github.com/lidatong/dataclasses-json/issues/328 - # https://github.com/lidatong/dataclasses-json/pull/351 - "ignore:The 'default' argument to fields is deprecated. 
Use 'dump_default' instead:DeprecationWarning:dataclasses_json.mm", - # https://pypi.org/project/emulated-roku/ - v0.3.0 - 2023-12-19 - # https://github.com/martonperei/emulated_roku - "ignore:loop argument is deprecated:DeprecationWarning:emulated_roku", - # https://github.com/w1ll1am23/pyeconet/blob/v0.1.23/src/pyeconet/api.py#L38 - v0.1.23 - 2024-10-08 - "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:pyeconet.api", - # https://github.com/thecynic/pylutron - v0.2.16 - 2024-10-22 - "ignore:setDaemon\\(\\) is deprecated, set the daemon attribute instead:DeprecationWarning:pylutron", - # https://github.com/pschmitt/pynuki/blob/1.6.3/pynuki/utils.py#L21 - v1.6.3 - 2024-02-24 - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pynuki.utils", - # https://github.com/lextudio/pysnmp/blob/v7.1.10/pysnmp/smi/compiler.py#L23-L31 - v7.1.10 - 2024-11-04 - "ignore:smiV1Relaxed is deprecated. Please use smi_v1_relaxed instead:DeprecationWarning:pysnmp.smi.compiler", - "ignore:getReadersFromUrls is deprecated. Please use get_readers_from_urls instead:DeprecationWarning:pysmi.reader.url", # wrong stacklevel - # https://github.com/briis/pyweatherflowudp/blob/v1.4.5/pyweatherflowudp/const.py#L20 - v1.4.5 - 2023-10-10 - "ignore:This function will be removed in future versions of pint:DeprecationWarning:pyweatherflowudp.const", - # Wrong stacklevel - # https://bugs.launchpad.net/beautifulsoup/+bug/2034451 fixed in >4.12.3 - "ignore:It looks like you're parsing an XML document using an HTML parser:UserWarning:html.parser", - # New in aiohttp - v3.9.0 - "ignore:It is recommended to use web.AppKey instances for keys:UserWarning:(homeassistant|tests|aiohttp_cors)", - # - SyntaxWarnings - # https://pypi.org/project/aprslib/ - v0.7.2 - 2022-07-10 - "ignore:invalid escape sequence:SyntaxWarning:.*aprslib.parsing.common", - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:aprslib.parsing.common", - # https://pypi.org/project/panasonic-viera/ - v0.4.2 - 2024-04-24 - # https://github.com/florianholzapfel/panasonic-viera/blob/0.4.2/panasonic_viera/__init__.py#L789 - "ignore:invalid escape sequence:SyntaxWarning:.*panasonic_viera", - # https://pypi.org/project/pyblackbird/ - v0.6 - 2023-03-15 - # https://github.com/koolsb/pyblackbird/pull/9 -> closed - "ignore:invalid escape sequence:SyntaxWarning:.*pyblackbird", - # https://pypi.org/project/pyws66i/ - v1.1 - 2022-04-05 - "ignore:invalid escape sequence:SyntaxWarning:.*pyws66i", - # https://pypi.org/project/sanix/ - v1.0.6 - 2024-05-01 - # https://github.com/tomaszsluszniak/sanix_py/blob/v1.0.6/sanix/__init__.py#L42 - "ignore:invalid escape sequence:SyntaxWarning:.*sanix", - # https://pypi.org/project/sleekxmppfs/ - v1.4.1 - 2022-08-18 - "ignore:invalid escape sequence:SyntaxWarning:.*sleekxmppfs.thirdparty.mini_dateutil", # codespell:ignore thirdparty - # - pkg_resources - # https://pypi.org/project/aiomusiccast/ - v0.14.8 - 2023-03-20 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:aiomusiccast", - # https://pypi.org/project/habitipy/ - v0.3.3 - 2024-10-28 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:habitipy.api", - # https://github.com/eavanvalkenburg/pysiaalarm/blob/v3.1.1/src/pysiaalarm/data/data.py#L7 - v3.1.1 - 2023-04-17 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pysiaalarm.data.data", - # https://pypi.org/project/pybotvac/ - v0.0.25 - 2024-04-11 - "ignore:pkg_resources is deprecated as an 
API:DeprecationWarning:pybotvac.version", - # https://github.com/home-assistant-ecosystem/python-mystrom/blob/2.2.0/pymystrom/__init__.py#L10 - v2.2.0 - 2023-05-21 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pymystrom", + # -- other + # Locale changes might take some time to resolve upstream + # https://github.com/Squachen/micloud/blob/v_0.6/micloud/micloud.py#L35 - v0.6 - 2022-12-08 + "ignore:'locale.getdefaultlocale' is deprecated and slated for removal in Python 3.15:DeprecationWarning:micloud.micloud", + # https://github.com/MatsNl/pyatag/issues/11 - v0.3.7.1 - 2023-10-09 + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pyatag.gateway", + # https://github.com/lidatong/dataclasses-json/issues/328 + # https://github.com/lidatong/dataclasses-json/pull/351 + "ignore:The 'default' argument to fields is deprecated. Use 'dump_default' instead:DeprecationWarning:dataclasses_json.mm", + # https://pypi.org/project/emulated-roku/ - v0.3.0 - 2023-12-19 + # https://github.com/martonperei/emulated_roku + "ignore:loop argument is deprecated:DeprecationWarning:emulated_roku", + # https://github.com/w1ll1am23/pyeconet/blob/v0.1.23/src/pyeconet/api.py#L38 - v0.1.23 - 2024-10-08 + "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:pyeconet.api", + # https://github.com/thecynic/pylutron - v0.2.16 - 2024-10-22 + "ignore:setDaemon\\(\\) is deprecated, set the daemon attribute instead:DeprecationWarning:pylutron", + # https://github.com/pschmitt/pynuki/blob/1.6.3/pynuki/utils.py#L21 - v1.6.3 - 2024-02-24 + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pynuki.utils", + # https://github.com/lextudio/pysnmp/blob/v7.1.10/pysnmp/smi/compiler.py#L23-L31 - v7.1.10 - 2024-11-04 + "ignore:smiV1Relaxed is deprecated. Please use smi_v1_relaxed instead:DeprecationWarning:pysnmp.smi.compiler", + "ignore:getReadersFromUrls is deprecated. 
Please use get_readers_from_urls instead:DeprecationWarning:pysmi.reader.url", # wrong stacklevel + # https://github.com/briis/pyweatherflowudp/blob/v1.4.5/pyweatherflowudp/const.py#L20 - v1.4.5 - 2023-10-10 + "ignore:This function will be removed in future versions of pint:DeprecationWarning:pyweatherflowudp.const", + # Wrong stacklevel + # https://bugs.launchpad.net/beautifulsoup/+bug/2034451 fixed in >4.12.3 + "ignore:It looks like you're parsing an XML document using an HTML parser:UserWarning:html.parser", + # New in aiohttp - v3.9.0 + "ignore:It is recommended to use web.AppKey instances for keys:UserWarning:(homeassistant|tests|aiohttp_cors)", + # - SyntaxWarnings + # https://pypi.org/project/aprslib/ - v0.7.2 - 2022-07-10 + "ignore:invalid escape sequence:SyntaxWarning:.*aprslib.parsing.common", + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:aprslib.parsing.common", + # https://pypi.org/project/panasonic-viera/ - v0.4.2 - 2024-04-24 + # https://github.com/florianholzapfel/panasonic-viera/blob/0.4.2/panasonic_viera/__init__.py#L789 + "ignore:invalid escape sequence:SyntaxWarning:.*panasonic_viera", + # https://pypi.org/project/pyblackbird/ - v0.6 - 2023-03-15 + # https://github.com/koolsb/pyblackbird/pull/9 -> closed + "ignore:invalid escape sequence:SyntaxWarning:.*pyblackbird", + # https://pypi.org/project/pyws66i/ - v1.1 - 2022-04-05 + "ignore:invalid escape sequence:SyntaxWarning:.*pyws66i", + # https://pypi.org/project/sanix/ - v1.0.6 - 2024-05-01 + # https://github.com/tomaszsluszniak/sanix_py/blob/v1.0.6/sanix/__init__.py#L42 + "ignore:invalid escape sequence:SyntaxWarning:.*sanix", + # https://pypi.org/project/sleekxmppfs/ - v1.4.1 - 2022-08-18 + "ignore:invalid escape sequence:SyntaxWarning:.*sleekxmppfs.thirdparty.mini_dateutil", # codespell:ignore thirdparty + # - pkg_resources + # https://pypi.org/project/aiomusiccast/ - v0.14.8 - 2023-03-20 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:aiomusiccast", + # https://pypi.org/project/habitipy/ - v0.3.3 - 2024-10-28 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:habitipy.api", + # https://github.com/eavanvalkenburg/pysiaalarm/blob/v3.1.1/src/pysiaalarm/data/data.py#L7 - v3.1.1 - 2023-04-17 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pysiaalarm.data.data", + # https://pypi.org/project/pybotvac/ - v0.0.25 - 2024-04-11 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pybotvac.version", + # https://github.com/home-assistant-ecosystem/python-mystrom/blob/2.2.0/pymystrom/__init__.py#L10 - v2.2.0 - 2023-05-21 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pymystrom", - # -- Python 3.13 - # HomeAssistant - "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:homeassistant.components.assist_pipeline.websocket_api", - "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:homeassistant.components.hddtemp.sensor", - # https://pypi.org/project/nextcord/ - v2.6.0 - 2023-09-23 - # https://github.com/nextcord/nextcord/issues/1174 - # https://github.com/nextcord/nextcord/blob/v2.6.1/nextcord/player.py#L5 - "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:nextcord.player", - # https://pypi.org/project/SpeechRecognition/ - v3.11.0 - 2024-05-05 - # https://github.com/Uberi/speech_recognition/blob/3.11.0/speech_recognition/__init__.py#L7 - "ignore:'aifc' is deprecated and slated for removal 
in Python 3.13:DeprecationWarning:speech_recognition", - # https://pypi.org/project/voip-utils/ - v0.2.0 - 2024-09-06 - # https://github.com/home-assistant-libs/voip-utils/blob/0.2.0/voip_utils/rtp_audio.py#L3 - "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:voip_utils.rtp_audio", + # -- Python 3.13 + # HomeAssistant + "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:homeassistant.components.assist_pipeline.websocket_api", + "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:homeassistant.components.hddtemp.sensor", + # https://pypi.org/project/nextcord/ - v2.6.0 - 2023-09-23 + # https://github.com/nextcord/nextcord/issues/1174 + # https://github.com/nextcord/nextcord/blob/v2.6.1/nextcord/player.py#L5 + "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:nextcord.player", + # https://pypi.org/project/SpeechRecognition/ - v3.11.0 - 2024-05-05 + # https://github.com/Uberi/speech_recognition/blob/3.11.0/speech_recognition/__init__.py#L7 + "ignore:'aifc' is deprecated and slated for removal in Python 3.13:DeprecationWarning:speech_recognition", + # https://pypi.org/project/voip-utils/ - v0.2.0 - 2024-09-06 + # https://github.com/home-assistant-libs/voip-utils/blob/0.2.0/voip_utils/rtp_audio.py#L3 + "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:voip_utils.rtp_audio", - # -- Python 3.13 - unmaintained projects, last release about 2+ years - # https://pypi.org/project/pydub/ - v0.25.1 - 2021-03-10 - "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pydub.utils", - # https://github.com/heathbar/plum-lightpad-python/issues/7 - v0.0.11 - 2018-10-16 - "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:plumlightpad.lightpad", - # https://pypi.org/project/pyws66i/ - v1.1 - 2022-04-05 - # https://github.com/ssaenger/pyws66i/blob/v1.1/pyws66i/__init__.py#L2 - "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pyws66i", + # -- Python 3.13 - unmaintained projects, last release about 2+ years + # https://pypi.org/project/pydub/ - v0.25.1 - 2021-03-10 + "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pydub.utils", + # https://github.com/heathbar/plum-lightpad-python/issues/7 - v0.0.11 - 2018-10-16 + "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:plumlightpad.lightpad", + # https://pypi.org/project/pyws66i/ - v1.1 - 2022-04-05 + # https://github.com/ssaenger/pyws66i/blob/v1.1/pyws66i/__init__.py#L2 + "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pyws66i", - # -- New in Python 3.13 - # https://github.com/kurtmckee/feedparser/pull/389 - >6.0.11 - # https://github.com/kurtmckee/feedparser/issues/481 - "ignore:'count' is passed as positional argument:DeprecationWarning:feedparser.html", - # https://github.com/youknowone/python-deadlib - Backports for aifc, telnetlib - "ignore:aifc was removed in Python 3.13.*'standard-aifc':DeprecationWarning:speech_recognition", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:homeassistant.components.hddtemp.sensor", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:ndms2_client.connection", - "ignore:telnetlib was removed in Python 
3.13.*'standard-telnetlib':DeprecationWarning:plumlightpad.lightpad", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:pyws66i", + # -- New in Python 3.13 + # https://github.com/kurtmckee/feedparser/pull/389 - >6.0.11 + # https://github.com/kurtmckee/feedparser/issues/481 + "ignore:'count' is passed as positional argument:DeprecationWarning:feedparser.html", + # https://github.com/youknowone/python-deadlib - Backports for aifc, telnetlib + "ignore:aifc was removed in Python 3.13.*'standard-aifc':DeprecationWarning:speech_recognition", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:homeassistant.components.hddtemp.sensor", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:ndms2_client.connection", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:plumlightpad.lightpad", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:pyws66i", - # -- unmaintained projects, last release about 2+ years - # https://pypi.org/project/agent-py/ - v0.0.23 - 2020-06-04 - "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:agent.a", - # https://pypi.org/project/aiomodernforms/ - v0.1.8 - 2021-06-27 - "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:aiomodernforms.modernforms", - # https://pypi.org/project/alarmdecoder/ - v1.13.11 - 2021-06-01 - "ignore:invalid escape sequence:SyntaxWarning:.*alarmdecoder", - # https://pypi.org/project/directv/ - v0.4.0 - 2020-09-12 - "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:directv.directv", - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:directv.models", - # https://pypi.org/project/foobot_async/ - v1.0.1 - 2024-08-16 - "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:foobot_async", - # https://pypi.org/project/httpsig/ - v1.3.0 - 2018-11-28 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:httpsig", - # https://pypi.org/project/influxdb/ - v5.3.2 - 2024-04-18 (archived) - "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb.line_protocol", - # https://pypi.org/project/lark-parser/ - v0.12.0 - 2021-08-30 -> moved to `lark` - # https://pypi.org/project/commentjson/ - v0.9.0 - 2020-10-05 - # https://github.com/vaidik/commentjson/issues/51 - # https://github.com/vaidik/commentjson/pull/52 - # Fixed upstream, commentjson depends on old version and seems to be unmaintained - "ignore:module '(sre_parse|sre_constants)' is deprecate:DeprecationWarning:lark.utils", - # https://pypi.org/project/lomond/ - v0.3.3 - 2018-09-21 - "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:lomond.session", - # https://pypi.org/project/oauth2client/ - v4.1.3 - 2018-09-07 (archived) - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:oauth2client.client", - # https://pypi.org/project/opuslib/ - v3.0.1 - 2018-01-16 - "ignore:\"is not\" with 'int' literal. 
Did you mean \"!=\"?:SyntaxWarning:.*opuslib.api.decoder", - # https://pypi.org/project/passlib/ - v1.7.4 - 2020-10-08 - "ignore:'crypt' is deprecated and slated for removal in Python 3.13:DeprecationWarning:passlib.utils", - # https://pypi.org/project/pilight/ - v0.1.1 - 2016-10-19 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pilight", - # https://pypi.org/project/plumlightpad/ - v0.0.11 - 2018-10-16 - "ignore:invalid escape sequence:SyntaxWarning:.*plumlightpad.plumdiscovery", - "ignore:\"is\" with 'int' literal. Did you mean \"==\"?:SyntaxWarning:.*plumlightpad.(lightpad|logicalload)", - # https://pypi.org/project/pure-python-adb/ - v0.3.0.dev0 - 2020-08-05 - "ignore:invalid escape sequence:SyntaxWarning:.*ppadb", - # https://pypi.org/project/pydub/ - v0.25.1 - 2021-03-10 - "ignore:invalid escape sequence:SyntaxWarning:.*pydub.utils", - # https://pypi.org/project/pyiss/ - v1.0.1 - 2016-12-19 - "ignore:\"is\" with 'int' literal. Did you mean \"==\"?:SyntaxWarning:.*pyiss", - # https://pypi.org/project/PyMetEireann/ - v2021.8.0 - 2021-08-16 - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:meteireann", - # https://pypi.org/project/PyPasser/ - v0.0.5 - 2021-10-21 - "ignore:invalid escape sequence:SyntaxWarning:.*pypasser.utils", - # https://pypi.org/project/pyqwikswitch/ - v0.94 - 2019-08-19 - "ignore:client.loop property is deprecated:DeprecationWarning:pyqwikswitch.async_", - "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:pyqwikswitch.async_", - # https://pypi.org/project/Rx/ - v3.2.0 - 2021-04-25 - "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:rx.internal.constants", - # https://pypi.org/project/rxv/ - v0.7.0 - 2021-10-10 - "ignore:defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead:DeprecationWarning:rxv.ssdp", + # -- unmaintained projects, last release about 2+ years + # https://pypi.org/project/agent-py/ - v0.0.23 - 2020-06-04 + "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:agent.a", + # https://pypi.org/project/aiomodernforms/ - v0.1.8 - 2021-06-27 + "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:aiomodernforms.modernforms", + # https://pypi.org/project/alarmdecoder/ - v1.13.11 - 2021-06-01 + "ignore:invalid escape sequence:SyntaxWarning:.*alarmdecoder", + # https://pypi.org/project/directv/ - v0.4.0 - 2020-09-12 + "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:directv.directv", + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:directv.models", + # https://pypi.org/project/foobot_async/ - v1.0.1 - 2024-08-16 + "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:foobot_async", + # https://pypi.org/project/httpsig/ - v1.3.0 - 2018-11-28 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:httpsig", + # https://pypi.org/project/influxdb/ - v5.3.2 - 2024-04-18 (archived) + "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb.line_protocol", + # https://pypi.org/project/lark-parser/ - v0.12.0 - 2021-08-30 -> moved to `lark` + # https://pypi.org/project/commentjson/ - v0.9.0 - 2020-10-05 + # https://github.com/vaidik/commentjson/issues/51 + # https://github.com/vaidik/commentjson/pull/52 + # Fixed upstream, commentjson depends on old version and seems to be unmaintained + "ignore:module '(sre_parse|sre_constants)' is deprecate:DeprecationWarning:lark.utils", + # 
https://pypi.org/project/lomond/ - v0.3.3 - 2018-09-21 + "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:lomond.session", + # https://pypi.org/project/oauth2client/ - v4.1.3 - 2018-09-07 (archived) + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:oauth2client.client", + # https://pypi.org/project/opuslib/ - v3.0.1 - 2018-01-16 + "ignore:\"is not\" with 'int' literal. Did you mean \"!=\"?:SyntaxWarning:.*opuslib.api.decoder", + # https://pypi.org/project/passlib/ - v1.7.4 - 2020-10-08 + "ignore:'crypt' is deprecated and slated for removal in Python 3.13:DeprecationWarning:passlib.utils", + # https://pypi.org/project/pilight/ - v0.1.1 - 2016-10-19 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pilight", + # https://pypi.org/project/plumlightpad/ - v0.0.11 - 2018-10-16 + "ignore:invalid escape sequence:SyntaxWarning:.*plumlightpad.plumdiscovery", + "ignore:\"is\" with 'int' literal. Did you mean \"==\"?:SyntaxWarning:.*plumlightpad.(lightpad|logicalload)", + # https://pypi.org/project/pure-python-adb/ - v0.3.0.dev0 - 2020-08-05 + "ignore:invalid escape sequence:SyntaxWarning:.*ppadb", + # https://pypi.org/project/pydub/ - v0.25.1 - 2021-03-10 + "ignore:invalid escape sequence:SyntaxWarning:.*pydub.utils", + # https://pypi.org/project/pyiss/ - v1.0.1 - 2016-12-19 + "ignore:\"is\" with 'int' literal. Did you mean \"==\"?:SyntaxWarning:.*pyiss", + # https://pypi.org/project/PyMetEireann/ - v2021.8.0 - 2021-08-16 + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:meteireann", + # https://pypi.org/project/PyPasser/ - v0.0.5 - 2021-10-21 + "ignore:invalid escape sequence:SyntaxWarning:.*pypasser.utils", + # https://pypi.org/project/pyqwikswitch/ - v0.94 - 2019-08-19 + "ignore:client.loop property is deprecated:DeprecationWarning:pyqwikswitch.async_", + "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:pyqwikswitch.async_", + # https://pypi.org/project/Rx/ - v3.2.0 - 2021-04-25 + "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:rx.internal.constants", + # https://pypi.org/project/rxv/ - v0.7.0 - 2021-10-10 + "ignore:defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead:DeprecationWarning:rxv.ssdp", ] [tool.coverage.run] @@ -688,16 +679,16 @@ source = ["homeassistant"] [tool.coverage.report] exclude_lines = [ - # Have to re-enable the standard pragma - "pragma: no cover", - # Don't complain about missing debug-only code: - "def __repr__", - # Don't complain if tests don't hit defensive assertion code: - "raise AssertionError", - "raise NotImplementedError", - # TYPE_CHECKING and @overload blocks are never executed during pytest run - "if TYPE_CHECKING:", - "@overload", + # Have to re-enable the standard pragma + "pragma: no cover", + # Don't complain about missing debug-only code: + "def __repr__", + # Don't complain if tests don't hit defensive assertion code: + "raise AssertionError", + "raise NotImplementedError", + # TYPE_CHECKING and @overload blocks are never executed during pytest run + "if TYPE_CHECKING:", + "@overload", ] [tool.ruff] @@ -705,158 +696,158 @@ required-version = ">=0.11.0" [tool.ruff.lint] select = [ - "A001", # Variable {name} is shadowing a Python builtin - "ASYNC", # flake8-async - "B002", # Python does not support the unary prefix increment - "B005", # Using .strip() with multi-character strings is misleading - "B007", # Loop control variable {name} not used within loop body 
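Note on the `filterwarnings` entries re-indented above: each string uses Python's warning-filter syntax, `action:message:category:module`, where `message` and `module` are regular expressions matched against the start of the warning text and the name of the module that raised it. The sketch below is illustrative only and is not part of this change; the warning text mirrors one of the entries above, but the emitting function is invented. It shows how a single known deprecation is carved out while everything else still escalates to an error, which is what the test suite relies on.

import warnings


def emit_deprecation() -> None:
    """Stand-in for a third-party module emitting a known deprecation warning."""
    warnings.warn(
        "'telnetlib' is deprecated and slated for removal in Python 3.13",
        DeprecationWarning,
        stacklevel=2,
    )


with warnings.catch_warnings():
    # Escalate every warning to an error, as a strict test run would.
    warnings.simplefilter("error")
    # Carve out one known, documented deprecation. Filters are checked
    # front-to-back and filterwarnings() prepends, so this entry wins.
    warnings.filterwarnings(
        "ignore",
        message=r"'telnetlib' is deprecated and slated for removal in Python 3\.13",
        category=DeprecationWarning,
    )
    emit_deprecation()  # silently ignored instead of raising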
- "B014", # Exception handler with duplicate exception - "B015", # Pointless comparison. Did you mean to assign a value? Otherwise, prepend assert or remove it. - "B017", # pytest.raises(BaseException) should be considered evil - "B018", # Found useless attribute access. Either assign it to a variable or remove it. - "B023", # Function definition does not bind loop variable {name} - "B024", # `{name}` is an abstract base class, but it has no abstract methods or properties - "B026", # Star-arg unpacking after a keyword argument is strongly discouraged - "B032", # Possible unintentional type annotation (using :). Did you mean to assign (using =)? - "B035", # Dictionary comprehension uses static key - "B904", # Use raise from to specify exception cause - "B905", # zip() without an explicit strict= parameter - "BLE", - "C", # complexity - "COM818", # Trailing comma on bare tuple prohibited - "D", # docstrings - "DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow() - "DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts) - "E", # pycodestyle - "F", # pyflakes/autoflake - "F541", # f-string without any placeholders - "FLY", # flynt - "FURB", # refurb - "G", # flake8-logging-format - "I", # isort - "INP", # flake8-no-pep420 - "ISC", # flake8-implicit-str-concat - "ICN001", # import concentions; {name} should be imported as {asname} - "LOG", # flake8-logging - "N804", # First argument of a class method should be named cls - "N805", # First argument of a method should be named self - "N815", # Variable {name} in class scope should not be mixedCase - "PERF", # Perflint - "PGH", # pygrep-hooks - "PIE", # flake8-pie - "PL", # pylint - "PT", # flake8-pytest-style - "PTH", # flake8-pathlib - "PYI", # flake8-pyi - "RET", # flake8-return - "RSE", # flake8-raise - "RUF005", # Consider iterable unpacking instead of concatenation - "RUF006", # Store a reference to the return value of asyncio.create_task - "RUF007", # Prefer itertools.pairwise() over zip() when iterating over successive pairs - "RUF008", # Do not use mutable default values for dataclass attributes - "RUF010", # Use explicit conversion flag - "RUF013", # PEP 484 prohibits implicit Optional - "RUF016", # Slice in indexed access to type {value_type} uses type {index_type} instead of an integer - "RUF017", # Avoid quadratic list summation - "RUF018", # Avoid assignment expressions in assert statements - "RUF019", # Unnecessary key check before dictionary access - "RUF020", # {never_like} | T is equivalent to T - "RUF021", # Parenthesize a and b expressions when chaining and and or together, to make the precedence clear - "RUF022", # Sort __all__ - "RUF023", # Sort __slots__ - "RUF024", # Do not pass mutable objects as values to dict.fromkeys - "RUF026", # default_factory is a positional-only argument to defaultdict - "RUF030", # print() call in assert statement is likely unintentional - "RUF032", # Decimal() called with float literal argument - "RUF033", # __post_init__ method with argument defaults - "RUF034", # Useless if-else condition - "RUF100", # Unused `noqa` directive - "RUF101", # noqa directives that use redirected rule codes - "RUF200", # Failed to parse pyproject.toml: {message} - "S102", # Use of exec detected - "S103", # bad-file-permissions - "S108", # hardcoded-temp-file - "S306", # suspicious-mktemp-usage - "S307", # suspicious-eval-usage - "S313", # suspicious-xmlc-element-tree-usage - "S314", # suspicious-xml-element-tree-usage - "S315", # suspicious-xml-expat-reader-usage - "S316", # 
suspicious-xml-expat-builder-usage - "S317", # suspicious-xml-sax-usage - "S318", # suspicious-xml-mini-dom-usage - "S319", # suspicious-xml-pull-dom-usage - "S601", # paramiko-call - "S602", # subprocess-popen-with-shell-equals-true - "S604", # call-with-shell-equals-true - "S608", # hardcoded-sql-expression - "S609", # unix-command-wildcard-injection - "SIM", # flake8-simplify - "SLF", # flake8-self - "SLOT", # flake8-slots - "T100", # Trace found: {name} used - "T20", # flake8-print - "TC", # flake8-type-checking - "TID", # Tidy imports - "TRY", # tryceratops - "UP", # pyupgrade - "UP031", # Use format specifiers instead of percent format - "UP032", # Use f-string instead of `format` call - "W", # pycodestyle + "A001", # Variable {name} is shadowing a Python builtin + "ASYNC", # flake8-async + "B002", # Python does not support the unary prefix increment + "B005", # Using .strip() with multi-character strings is misleading + "B007", # Loop control variable {name} not used within loop body + "B014", # Exception handler with duplicate exception + "B015", # Pointless comparison. Did you mean to assign a value? Otherwise, prepend assert or remove it. + "B017", # pytest.raises(BaseException) should be considered evil + "B018", # Found useless attribute access. Either assign it to a variable or remove it. + "B023", # Function definition does not bind loop variable {name} + "B024", # `{name}` is an abstract base class, but it has no abstract methods or properties + "B026", # Star-arg unpacking after a keyword argument is strongly discouraged + "B032", # Possible unintentional type annotation (using :). Did you mean to assign (using =)? + "B035", # Dictionary comprehension uses static key + "B904", # Use raise from to specify exception cause + "B905", # zip() without an explicit strict= parameter + "BLE", + "C", # complexity + "COM818", # Trailing comma on bare tuple prohibited + "D", # docstrings + "DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow() + "DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts) + "E", # pycodestyle + "F", # pyflakes/autoflake + "F541", # f-string without any placeholders + "FLY", # flynt + "FURB", # refurb + "G", # flake8-logging-format + "I", # isort + "INP", # flake8-no-pep420 + "ISC", # flake8-implicit-str-concat + "ICN001", # import concentions; {name} should be imported as {asname} + "LOG", # flake8-logging + "N804", # First argument of a class method should be named cls + "N805", # First argument of a method should be named self + "N815", # Variable {name} in class scope should not be mixedCase + "PERF", # Perflint + "PGH", # pygrep-hooks + "PIE", # flake8-pie + "PL", # pylint + "PT", # flake8-pytest-style + "PTH", # flake8-pathlib + "PYI", # flake8-pyi + "RET", # flake8-return + "RSE", # flake8-raise + "RUF005", # Consider iterable unpacking instead of concatenation + "RUF006", # Store a reference to the return value of asyncio.create_task + "RUF007", # Prefer itertools.pairwise() over zip() when iterating over successive pairs + "RUF008", # Do not use mutable default values for dataclass attributes + "RUF010", # Use explicit conversion flag + "RUF013", # PEP 484 prohibits implicit Optional + "RUF016", # Slice in indexed access to type {value_type} uses type {index_type} instead of an integer + "RUF017", # Avoid quadratic list summation + "RUF018", # Avoid assignment expressions in assert statements + "RUF019", # Unnecessary key check before dictionary access + "RUF020", # {never_like} | T is equivalent to T + 
"RUF021", # Parenthesize a and b expressions when chaining and and or together, to make the precedence clear + "RUF022", # Sort __all__ + "RUF023", # Sort __slots__ + "RUF024", # Do not pass mutable objects as values to dict.fromkeys + "RUF026", # default_factory is a positional-only argument to defaultdict + "RUF030", # print() call in assert statement is likely unintentional + "RUF032", # Decimal() called with float literal argument + "RUF033", # __post_init__ method with argument defaults + "RUF034", # Useless if-else condition + "RUF100", # Unused `noqa` directive + "RUF101", # noqa directives that use redirected rule codes + "RUF200", # Failed to parse pyproject.toml: {message} + "S102", # Use of exec detected + "S103", # bad-file-permissions + "S108", # hardcoded-temp-file + "S306", # suspicious-mktemp-usage + "S307", # suspicious-eval-usage + "S313", # suspicious-xmlc-element-tree-usage + "S314", # suspicious-xml-element-tree-usage + "S315", # suspicious-xml-expat-reader-usage + "S316", # suspicious-xml-expat-builder-usage + "S317", # suspicious-xml-sax-usage + "S318", # suspicious-xml-mini-dom-usage + "S319", # suspicious-xml-pull-dom-usage + "S601", # paramiko-call + "S602", # subprocess-popen-with-shell-equals-true + "S604", # call-with-shell-equals-true + "S608", # hardcoded-sql-expression + "S609", # unix-command-wildcard-injection + "SIM", # flake8-simplify + "SLF", # flake8-self + "SLOT", # flake8-slots + "T100", # Trace found: {name} used + "T20", # flake8-print + "TC", # flake8-type-checking + "TID", # Tidy imports + "TRY", # tryceratops + "UP", # pyupgrade + "UP031", # Use format specifiers instead of percent format + "UP032", # Use f-string instead of `format` call + "W", # pycodestyle ] ignore = [ - "ASYNC109", # Async function definition with a `timeout` parameter Use `asyncio.timeout` instead - "ASYNC110", # Use `asyncio.Event` instead of awaiting `asyncio.sleep` in a `while` loop - "D202", # No blank lines allowed after function docstring - "D203", # 1 blank line required before class docstring - "D213", # Multi-line docstring summary should start at the second line - "D406", # Section name should end with a newline - "D407", # Section name underlining - "E501", # line too long + "ASYNC109", # Async function definition with a `timeout` parameter Use `asyncio.timeout` instead + "ASYNC110", # Use `asyncio.Event` instead of awaiting `asyncio.sleep` in a `while` loop + "D202", # No blank lines allowed after function docstring + "D203", # 1 blank line required before class docstring + "D213", # Multi-line docstring summary should start at the second line + "D406", # Section name should end with a newline + "D407", # Section name underlining + "E501", # line too long - "PLC1901", # {existing} can be simplified to {replacement} as an empty string is falsey; too many false positives - "PLR0911", # Too many return statements ({returns} > {max_returns}) - "PLR0912", # Too many branches ({branches} > {max_branches}) - "PLR0913", # Too many arguments to function call ({c_args} > {max_args}) - "PLR0915", # Too many statements ({statements} > {max_statements}) - "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable - "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target - "PT011", # pytest.raises({exception}) is too broad, set the `match` parameter or use a more specific exception - "PT018", # Assertion should be broken down into multiple parts - "RUF001", # String contains ambiguous unicode character. 
- "RUF002", # Docstring contains ambiguous unicode character. - "RUF003", # Comment contains ambiguous unicode character. - "RUF015", # Prefer next(...) over single element slice - "SIM102", # Use a single if statement instead of nested if statements - "SIM103", # Return the condition {condition} directly - "SIM108", # Use ternary operator {contents} instead of if-else-block - "SIM115", # Use context handler for opening files + "PLC1901", # {existing} can be simplified to {replacement} as an empty string is falsey; too many false positives + "PLR0911", # Too many return statements ({returns} > {max_returns}) + "PLR0912", # Too many branches ({branches} > {max_branches}) + "PLR0913", # Too many arguments to function call ({c_args} > {max_args}) + "PLR0915", # Too many statements ({statements} > {max_statements}) + "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable + "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target + "PT011", # pytest.raises({exception}) is too broad, set the `match` parameter or use a more specific exception + "PT018", # Assertion should be broken down into multiple parts + "RUF001", # String contains ambiguous unicode character. + "RUF002", # Docstring contains ambiguous unicode character. + "RUF003", # Comment contains ambiguous unicode character. + "RUF015", # Prefer next(...) over single element slice + "SIM102", # Use a single if statement instead of nested if statements + "SIM103", # Return the condition {condition} directly + "SIM108", # Use ternary operator {contents} instead of if-else-block + "SIM115", # Use context handler for opening files - # Moving imports into type-checking blocks can mess with pytest.patch() - "TC001", # Move application import {} into a type-checking block - "TC002", # Move third-party import {} into a type-checking block - "TC003", # Move standard library import {} into a type-checking block - # Quotes for typing.cast generally not necessary, only for performance critical paths - "TC006", # Add quotes to type expression in typing.cast() + # Moving imports into type-checking blocks can mess with pytest.patch() + "TC001", # Move application import {} into a type-checking block + "TC002", # Move third-party import {} into a type-checking block + "TC003", # Move standard library import {} into a type-checking block + # Quotes for typing.cast generally not necessary, only for performance critical paths + "TC006", # Add quotes to type expression in typing.cast() - "TRY003", # Avoid specifying long messages outside the exception class - "TRY400", # Use `logging.exception` instead of `logging.error` - # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923 - "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)` + "TRY003", # Avoid specifying long messages outside the exception class + "TRY400", # Use `logging.exception` instead of `logging.error` + # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923 + "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)` - # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules - "W191", - "E111", - "E114", - "E117", - "D206", - "D300", - "Q", - "COM812", - "COM819", + # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules + "W191", + "E111", + "E114", + "E117", + "D206", + "D300", + "Q", + "COM812", + "COM819", - # Disabled because ruff does not understand type of __all__ 
generated by a function - "PLE0605", + # Disabled because ruff does not understand type of __all__ generated by a function + "PLE0605", ] [tool.ruff.lint.flake8-import-conventions.extend-aliases] @@ -932,9 +923,7 @@ mark-parentheses = false [tool.ruff.lint.isort] force-sort-within-sections = true -known-first-party = [ - "homeassistant", -] +known-first-party = ["homeassistant"] combine-as-imports = true split-on-trailing-comma = false diff --git a/requirements.txt b/requirements.txt index 0735e38c89c..b13ef7b02e5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,7 +10,7 @@ aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.3 aiohttp-asyncmdnsresolver==0.1.1 aiozoneinfo==0.2.3 -annotatedyaml==0.4.4 +annotatedyaml==0.4.5 astral==2.2 async-interrupt==1.2.2 attrs==25.1.0 @@ -31,9 +31,9 @@ lru-dict==1.3.0 PyJWT==2.10.1 cryptography==44.0.1 Pillow==11.1.0 -propcache==0.3.0 +propcache==0.3.1 pyOpenSSL==25.0.0 -orjson==3.10.15 +orjson==3.10.16 packaging>=23.1 psutil-home-assistant==0.0.1 python-slugify==8.0.4 @@ -43,10 +43,10 @@ securetar==2025.2.1 SQLAlchemy==2.0.39 standard-aifc==3.13.0 standard-telnetlib==3.13.0 -typing-extensions>=4.12.2,<5.0 +typing-extensions>=4.13.0,<5.0 ulid-transform==1.4.0 urllib3>=1.26.5,<2 -uv==0.6.8 +uv==0.6.10 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.6 diff --git a/requirements_all.txt b/requirements_all.txt index f3397e70bec..df321a5f112 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -45,7 +45,7 @@ ProgettiHWSW==0.1.3 # PyBluez==0.22 # homeassistant.components.cast -PyChromecast==14.0.6 +PyChromecast==14.0.7 # homeassistant.components.flick_electric PyFlick==1.1.3 @@ -84,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.57.1 +PySwitchbot==0.58.0 # homeassistant.components.switchmate PySwitchmate==0.5.1 @@ -179,7 +179,7 @@ aioacaia==0.1.14 aioairq==0.4.4 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.10 +aioairzone-cloud==0.6.11 # homeassistant.components.airzone aioairzone==0.9.9 @@ -210,10 +210,10 @@ aioazuredevops==2.2.1 aiobafi6==0.9.0 # homeassistant.components.aws -aiobotocore==2.13.1 +aiobotocore==2.21.1 # homeassistant.components.comelit -aiocomelit==0.11.2 +aiocomelit==0.11.3 # homeassistant.components.dhcp aiodhcpwatcher==1.1.1 @@ -225,7 +225,7 @@ aiodiscover==2.6.1 aiodns==3.2.0 # homeassistant.components.duke_energy -aiodukeenergy==0.2.2 +aiodukeenergy==0.3.0 # homeassistant.components.eafm aioeafm==0.1.2 @@ -267,7 +267,7 @@ aiohasupervisor==0.3.0 aiohomeconnect==0.16.3 # homeassistant.components.homekit_controller -aiohomekit==3.2.8 +aiohomekit==3.2.13 # homeassistant.components.mcp_server aiohttp_sse==2.2.0 @@ -422,7 +422,7 @@ aiowaqi==3.1.0 aiowatttime==0.1.1 # homeassistant.components.webdav -aiowebdav2==0.4.2 +aiowebdav2==0.4.4 # homeassistant.components.webostv aiowebostv==0.7.3 @@ -464,7 +464,7 @@ amcrest==1.9.8 androidtv[async]==0.0.75 # homeassistant.components.androidtv_remote -androidtvremote2==0.2.0 +androidtvremote2==0.2.1 # homeassistant.components.anel_pwrctrl anel-pwrctrl-homeassistant==0.0.1.dev2 @@ -491,7 +491,7 @@ apprise==1.9.1 aprslib==0.7.2 # homeassistant.components.apsystems -apsystems-ez1==2.4.0 +apsystems-ez1==2.5.0 # homeassistant.components.aqualogic aqualogic==2.6 @@ -627,7 +627,6 @@ bluecurrent-api==1.2.3 bluemaestro-ble==0.2.3 # homeassistant.components.decora -# homeassistant.components.zengge # bluepy==1.3.0 # homeassistant.components.bluetooth @@ -645,15 +644,18 @@ bluetooth-data-tools==1.26.1 # 
homeassistant.components.bond bond-async==0.2.1 +# homeassistant.components.bosch_alarm +bosch-alarm-mode2==0.4.3 + # homeassistant.components.bosch_shc boschshcpy==0.2.91 # homeassistant.components.amazon_polly # homeassistant.components.route53 -boto3==1.34.131 +boto3==1.37.1 # homeassistant.components.aws -botocore==1.34.131 +botocore==1.37.1 # homeassistant.components.bring bring-api==1.1.0 @@ -756,7 +758,7 @@ debugpy==1.8.13 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==12.3.1 +deebot-client==12.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -1033,13 +1035,13 @@ google-api-python-client==2.71.0 google-cloud-pubsub==2.29.0 # homeassistant.components.google_cloud -google-cloud-speech==2.27.0 +google-cloud-speech==2.31.1 # homeassistant.components.google_cloud -google-cloud-texttospeech==2.17.2 +google-cloud-texttospeech==2.25.1 # homeassistant.components.google_generative_ai_conversation -google-genai==1.1.0 +google-genai==1.7.0 # homeassistant.components.nest google-nest-sdm==7.1.4 @@ -1152,13 +1154,13 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.68 +holidays==0.69 # homeassistant.components.frontend -home-assistant-frontend==20250306.0 +home-assistant-frontend==20250328.0 # homeassistant.components.conversation -home-assistant-intents==2025.3.23 +home-assistant-intents==2025.3.28 # homeassistant.components.homematicip_cloud homematicip==1.1.7 @@ -1182,7 +1184,7 @@ hyperion-py==0.7.5 iammeter==0.2.1 # homeassistant.components.iaqualink -iaqualink==0.5.0 +iaqualink==0.5.3 # homeassistant.components.ibeacon ibeacon-ble==1.2.0 @@ -1194,7 +1196,7 @@ ibmiotf==0.3.4 # homeassistant.components.local_calendar # homeassistant.components.local_todo # homeassistant.components.remote_calendar -ical==9.0.1 +ical==9.0.3 # homeassistant.components.caldav icalendar==6.1.0 @@ -1218,7 +1220,7 @@ igloohome-api==0.1.0 ihcsdk==2.8.5 # homeassistant.components.imgw_pib -imgw_pib==1.0.9 +imgw_pib==1.0.10 # homeassistant.components.incomfort incomfort-client==0.6.7 @@ -1403,7 +1405,7 @@ messagebird==1.2.0 meteoalertapi==0.3.1 # homeassistant.components.meteo_france -meteofrance-api==1.3.0 +meteofrance-api==1.4.0 # homeassistant.components.mfi mficlient==0.5.0 @@ -1451,7 +1453,7 @@ mozart-api==4.1.1.116.4 mullvad-api==1.0.0 # homeassistant.components.music_assistant -music-assistant-client==1.1.1 +music-assistant-client==1.2.0 # homeassistant.components.tts mutagen==1.47.0 @@ -1673,7 +1675,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.7.2 +plugwise==1.7.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1708,7 +1710,7 @@ proxmoxer==2.0.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.1.1 +psutil==7.0.0 # homeassistant.components.pulseaudio_loopback pulsectl==23.5.2 @@ -1720,7 +1722,7 @@ pushbullet.py==0.11.0 pushover_complete==1.1.1 # homeassistant.components.pvoutput -pvo==2.2.0 +pvo==2.2.1 # homeassistant.components.aosmith py-aosmith==1.0.12 @@ -1734,6 +1736,9 @@ py-ccm15==0.0.9 # homeassistant.components.cpuspeed py-cpuinfo==9.0.0 +# homeassistant.components.pterodactyl +py-dactyl==2.0.4 + # homeassistant.components.dormakaba_dkey py-dormakaba-dkey==1.0.5 @@ -1913,7 +1918,7 @@ pydoods==1.0.2 pydrawise==2025.3.0 # homeassistant.components.android_ip_webcam -pydroid-ipcam==2.0.0 +pydroid-ipcam==3.0.0 # homeassistant.components.ebox pyebox==1.1.4 @@ -2042,7 +2047,7 @@ pyiskra==0.1.15 pyiss==1.0.1 # 
homeassistant.components.isy994 -pyisy==3.1.14 +pyisy==3.4.0 # homeassistant.components.itach pyitachip2ir==0.0.7 @@ -2203,7 +2208,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.16.4 +pyoverkiz==1.16.5 # homeassistant.components.onewire pyownet==0.10.0.post1 @@ -2284,7 +2289,7 @@ pyschlage==2024.11.0 pysensibo==1.1.0 # homeassistant.components.serial -pyserial-asyncio-fast==0.14 +pyserial-asyncio-fast==0.16 # homeassistant.components.acer_projector # homeassistant.components.crownstone @@ -2314,7 +2319,7 @@ pysma==0.7.5 pysmappee==0.2.29 # homeassistant.components.smartthings -pysmartthings==2.7.4 +pysmartthings==3.0.1 # homeassistant.components.smarty pysmarty2==0.10.2 @@ -2326,7 +2331,7 @@ pysmhi==1.0.0 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.2.3 +pysmlight==0.2.4 # homeassistant.components.snmp pysnmp==6.2.6 @@ -2425,7 +2430,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.10.2 # homeassistant.components.linkplay -python-linkplay==0.2.1 +python-linkplay==0.2.2 # homeassistant.components.lirc # python-lirc==1.2.3 @@ -2456,7 +2461,7 @@ python-otbr-api==2.7.0 python-overseerr==0.7.1 # homeassistant.components.picnic -python-picnic-api2==1.2.2 +python-picnic-api2==1.2.4 # homeassistant.components.rabbitair python-rabbitair==0.0.8 @@ -2471,13 +2476,13 @@ python-roborock==2.16.1 python-smarttub==0.0.39 # homeassistant.components.snoo -python-snoo==0.6.4 +python-snoo==0.6.5 # homeassistant.components.songpal python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.18.6 +python-tado==0.18.9 # homeassistant.components.technove python-technove==2.0.0 @@ -2559,7 +2564,7 @@ pywemo==1.4.0 pywilight==0.0.74 # homeassistant.components.wiz -pywizlight==0.5.14 +pywizlight==0.6.2 # homeassistant.components.wmspro pywmspro==0.2.1 @@ -2622,7 +2627,7 @@ renault-api==0.2.9 renson-endura-delta==1.7.2 # homeassistant.components.reolink -reolink-aio==0.12.3 +reolink-aio==0.13.0 # homeassistant.components.idteck_prox rfk101py==0.0.1 @@ -2825,9 +2830,6 @@ stringcase==1.2.0 # homeassistant.components.subaru subarulink==0.7.13 -# homeassistant.components.sunweg -sunweg==3.0.2 - # homeassistant.components.surepetcare surepy==0.9.0 @@ -2876,7 +2878,7 @@ temperusb==1.6.1 # homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.9.13 +tesla-fleet-api==1.0.16 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -3126,7 +3128,7 @@ yeelight==0.7.16 yeelightsunflower==0.0.10 # homeassistant.components.yolink -yolink-api==0.4.8 +yolink-api==0.4.9 # homeassistant.components.youless youless-api==2.2.0 @@ -3135,7 +3137,7 @@ youless-api==2.2.0 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2025.02.19 +yt-dlp[default]==2025.03.26 # homeassistant.components.zabbix zabbix-utils==2.0.2 @@ -3143,9 +3145,6 @@ zabbix-utils==2.0.2 # homeassistant.components.zamg zamg==0.3.6 -# homeassistant.components.zengge -zengge==0.2 - # homeassistant.components.zeroconf zeroconf==0.146.0 @@ -3153,7 +3152,7 @@ zeroconf==0.146.0 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.53 +zha==0.0.54 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.13 diff --git a/requirements_test.txt b/requirements_test.txt index baf72265c40..c7bb9b11b87 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -12,13 +12,13 @@ coverage==7.6.12 freezegun==1.5.1 license-expression==30.4.1 mock-open==1.4.0 -mypy-dev==1.16.0a5 +mypy-dev==1.16.0a7 
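As an aside on the pinned test requirements above, and not part of the repository's own tooling: a pin such as `pytest-asyncio==0.26.0` can be spot-checked against the installed environment with the third-party `packaging` library. This is a hedged sketch; the chosen pin is just one of the entries above, and the check assumes `packaging` is available.

from importlib.metadata import version
from packaging.requirements import Requirement

# Spot-check one pin from requirements_test.txt against what is installed.
req = Requirement("pytest-asyncio==0.26.0")
installed = version(req.name)
if req.specifier.contains(installed):
    print(f"{req.name} {installed} satisfies {req.specifier}")
else:
    print(f"{req.name} mismatch: installed {installed}, pinned {req.specifier}")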
pre-commit==4.0.0 pydantic==2.10.6 pylint==3.3.6 pylint-per-file-ignores==1.4.0 pipdeptree==2.25.1 -pytest-asyncio==0.25.3 +pytest-asyncio==0.26.0 pytest-aiohttp==1.1.0 pytest-cov==6.0.0 pytest-freezer==0.4.9 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 92ec683dcdb..4b8df3aa1a8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -42,7 +42,7 @@ PlexAPI==4.15.16 ProgettiHWSW==0.1.3 # homeassistant.components.cast -PyChromecast==14.0.6 +PyChromecast==14.0.7 # homeassistant.components.flick_electric PyFlick==1.1.3 @@ -81,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.57.1 +PySwitchbot==0.58.0 # homeassistant.components.syncthru PySyncThru==0.8.0 @@ -167,7 +167,7 @@ aioacaia==0.1.14 aioairq==0.4.4 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.10 +aioairzone-cloud==0.6.11 # homeassistant.components.airzone aioairzone==0.9.9 @@ -198,10 +198,10 @@ aioazuredevops==2.2.1 aiobafi6==0.9.0 # homeassistant.components.aws -aiobotocore==2.13.1 +aiobotocore==2.21.1 # homeassistant.components.comelit -aiocomelit==0.11.2 +aiocomelit==0.11.3 # homeassistant.components.dhcp aiodhcpwatcher==1.1.1 @@ -213,7 +213,7 @@ aiodiscover==2.6.1 aiodns==3.2.0 # homeassistant.components.duke_energy -aiodukeenergy==0.2.2 +aiodukeenergy==0.3.0 # homeassistant.components.eafm aioeafm==0.1.2 @@ -252,7 +252,7 @@ aiohasupervisor==0.3.0 aiohomeconnect==0.16.3 # homeassistant.components.homekit_controller -aiohomekit==3.2.8 +aiohomekit==3.2.13 # homeassistant.components.mcp_server aiohttp_sse==2.2.0 @@ -404,7 +404,7 @@ aiowaqi==3.1.0 aiowatttime==0.1.1 # homeassistant.components.webdav -aiowebdav2==0.4.2 +aiowebdav2==0.4.4 # homeassistant.components.webostv aiowebostv==0.7.3 @@ -440,7 +440,7 @@ amberelectric==2.0.12 androidtv[async]==0.0.75 # homeassistant.components.androidtv_remote -androidtvremote2==0.2.0 +androidtvremote2==0.2.1 # homeassistant.components.anova anova-wifi==0.17.0 @@ -464,7 +464,7 @@ apprise==1.9.1 aprslib==0.7.2 # homeassistant.components.apsystems -apsystems-ez1==2.4.0 +apsystems-ez1==2.5.0 # homeassistant.components.aranet aranet4==2.5.1 @@ -569,11 +569,14 @@ bluetooth-data-tools==1.26.1 # homeassistant.components.bond bond-async==0.2.1 +# homeassistant.components.bosch_alarm +bosch-alarm-mode2==0.4.3 + # homeassistant.components.bosch_shc boschshcpy==0.2.91 # homeassistant.components.aws -botocore==1.34.131 +botocore==1.37.1 # homeassistant.components.bring bring-api==1.1.0 @@ -646,7 +649,7 @@ dbus-fast==2.43.0 debugpy==1.8.13 # homeassistant.components.ecovacs -deebot-client==12.3.1 +deebot-client==12.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -883,13 +886,13 @@ google-api-python-client==2.71.0 google-cloud-pubsub==2.29.0 # homeassistant.components.google_cloud -google-cloud-speech==2.27.0 +google-cloud-speech==2.31.1 # homeassistant.components.google_cloud -google-cloud-texttospeech==2.17.2 +google-cloud-texttospeech==2.25.1 # homeassistant.components.google_generative_ai_conversation -google-genai==1.1.0 +google-genai==1.7.0 # homeassistant.components.nest google-nest-sdm==7.1.4 @@ -978,13 +981,13 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.68 +holidays==0.69 # homeassistant.components.frontend -home-assistant-frontend==20250306.0 +home-assistant-frontend==20250328.0 # homeassistant.components.conversation -home-assistant-intents==2025.3.23 +home-assistant-intents==2025.3.28 # 
homeassistant.components.homematicip_cloud homematicip==1.1.7 @@ -1002,7 +1005,7 @@ huum==0.7.12 hyperion-py==0.7.5 # homeassistant.components.iaqualink -iaqualink==0.5.0 +iaqualink==0.5.3 # homeassistant.components.ibeacon ibeacon-ble==1.2.0 @@ -1011,7 +1014,7 @@ ibeacon-ble==1.2.0 # homeassistant.components.local_calendar # homeassistant.components.local_todo # homeassistant.components.remote_calendar -ical==9.0.1 +ical==9.0.3 # homeassistant.components.caldav icalendar==6.1.0 @@ -1029,7 +1032,7 @@ ifaddr==0.2.0 igloohome-api==0.1.0 # homeassistant.components.imgw_pib -imgw_pib==1.0.9 +imgw_pib==1.0.10 # homeassistant.components.incomfort incomfort-client==0.6.7 @@ -1172,7 +1175,7 @@ medcom-ble==0.1.1 melnor-bluetooth==0.0.25 # homeassistant.components.meteo_france -meteofrance-api==1.3.0 +meteofrance-api==1.4.0 # homeassistant.components.mfi mficlient==0.5.0 @@ -1220,7 +1223,7 @@ mozart-api==4.1.1.116.4 mullvad-api==1.0.0 # homeassistant.components.music_assistant -music-assistant-client==1.1.1 +music-assistant-client==1.2.0 # homeassistant.components.tts mutagen==1.47.0 @@ -1383,7 +1386,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.7.2 +plugwise==1.7.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1409,7 +1412,7 @@ prometheus-client==0.21.0 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.1.1 +psutil==7.0.0 # homeassistant.components.pushbullet pushbullet.py==0.11.0 @@ -1418,7 +1421,7 @@ pushbullet.py==0.11.0 pushover_complete==1.1.1 # homeassistant.components.pvoutput -pvo==2.2.0 +pvo==2.2.1 # homeassistant.components.aosmith py-aosmith==1.0.12 @@ -1432,6 +1435,9 @@ py-ccm15==0.0.9 # homeassistant.components.cpuspeed py-cpuinfo==9.0.0 +# homeassistant.components.pterodactyl +py-dactyl==2.0.4 + # homeassistant.components.dormakaba_dkey py-dormakaba-dkey==1.0.5 @@ -1560,7 +1566,7 @@ pydiscovergy==3.0.2 pydrawise==2025.3.0 # homeassistant.components.android_ip_webcam -pydroid-ipcam==2.0.0 +pydroid-ipcam==3.0.0 # homeassistant.components.ecoforest pyecoforest==0.4.0 @@ -1662,7 +1668,7 @@ pyiskra==0.1.15 pyiss==1.0.1 # homeassistant.components.isy994 -pyisy==3.1.14 +pyisy==3.4.0 # homeassistant.components.ituran pyituran==0.1.4 @@ -1796,7 +1802,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.16.4 +pyoverkiz==1.16.5 # homeassistant.components.onewire pyownet==0.10.0.post1 @@ -1883,7 +1889,7 @@ pysma==0.7.5 pysmappee==0.2.29 # homeassistant.components.smartthings -pysmartthings==2.7.4 +pysmartthings==3.0.1 # homeassistant.components.smarty pysmarty2==0.10.2 @@ -1895,7 +1901,7 @@ pysmhi==1.0.0 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.2.3 +pysmlight==0.2.4 # homeassistant.components.snmp pysnmp==6.2.6 @@ -1961,7 +1967,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.10.2 # homeassistant.components.linkplay -python-linkplay==0.2.1 +python-linkplay==0.2.2 # homeassistant.components.matter python-matter-server==7.0.0 @@ -1989,7 +1995,7 @@ python-otbr-api==2.7.0 python-overseerr==0.7.1 # homeassistant.components.picnic -python-picnic-api2==1.2.2 +python-picnic-api2==1.2.4 # homeassistant.components.rabbitair python-rabbitair==0.0.8 @@ -2001,13 +2007,13 @@ python-roborock==2.16.1 python-smarttub==0.0.39 # homeassistant.components.snoo -python-snoo==0.6.4 +python-snoo==0.6.5 # homeassistant.components.songpal python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.18.6 +python-tado==0.18.9 # homeassistant.components.technove 
python-technove==2.0.0 @@ -2074,7 +2080,7 @@ pywemo==1.4.0 pywilight==0.0.74 # homeassistant.components.wiz -pywizlight==0.5.14 +pywizlight==0.6.2 # homeassistant.components.wmspro pywmspro==0.2.1 @@ -2122,7 +2128,7 @@ renault-api==0.2.9 renson-endura-delta==1.7.2 # homeassistant.components.reolink -reolink-aio==0.12.3 +reolink-aio==0.13.0 # homeassistant.components.rflink rflink==0.0.66 @@ -2283,9 +2289,6 @@ stringcase==1.2.0 # homeassistant.components.subaru subarulink==0.7.13 -# homeassistant.components.sunweg -sunweg==3.0.2 - # homeassistant.components.surepetcare surepy==0.9.0 @@ -2313,7 +2316,7 @@ temperusb==1.6.1 # homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.9.13 +tesla-fleet-api==1.0.16 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2518,7 +2521,7 @@ yalexs==8.10.0 yeelight==0.7.16 # homeassistant.components.yolink -yolink-api==0.4.8 +yolink-api==0.4.9 # homeassistant.components.youless youless-api==2.2.0 @@ -2527,7 +2530,7 @@ youless-api==2.2.0 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2025.02.19 +yt-dlp[default]==2025.03.26 # homeassistant.components.zamg zamg==0.3.6 @@ -2539,7 +2542,7 @@ zeroconf==0.146.0 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.53 +zha==0.0.54 # homeassistant.components.zwave_js zwave-js-server-python==0.62.0 diff --git a/script/hassfest/config_flow.py b/script/hassfest/config_flow.py index f842ec61b97..1f8b7d1139b 100644 --- a/script/hassfest/config_flow.py +++ b/script/hassfest/config_flow.py @@ -95,7 +95,6 @@ def _populate_brand_integrations( integration = integrations.get(domain) if not integration or integration.integration_type in ( "entity", - "hardware", "system", ): continue @@ -171,7 +170,7 @@ def _generate_integrations( result["integration"][domain] = metadata else: # integration integration = integrations[domain] - if integration.integration_type in ("entity", "system", "hardware"): + if integration.integration_type in ("entity", "system"): continue if integration.translated_name: diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index c4f66faafb0..bfdb61096b6 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -14,7 +14,7 @@ WORKDIR "/github/workspace" COPY . 
/usr/src/homeassistant # Uv is only needed during build -RUN --mount=from=ghcr.io/astral-sh/uv:0.6.8,source=/uv,target=/bin/uv \ +RUN --mount=from=ghcr.io/astral-sh/uv:0.6.10,source=/uv,target=/bin/uv \ # Uv creates a lock file in /tmp --mount=type=tmpfs,target=/tmp \ # Required for PyTurboJPEG @@ -25,7 +25,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.6.8,source=/uv,target=/bin/uv \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree==2.25.1 tqdm==4.67.1 ruff==0.11.0 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.3.23 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.3.28 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index d74011801d5..fdcbe16f092 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -896,7 +896,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "sfr_box", "sharkiq", "shell_command", - "shelly", "shodan", "shopping_list", "sia", @@ -1927,7 +1926,6 @@ INTEGRATIONS_WITHOUT_SCALE = [ "risco", "rituals_perfume_genie", "rmvtransport", - "roborock", "rocketchat", "roku", "romy", diff --git a/tests/components/ambient_network/snapshots/test_sensor.ambr b/tests/components/ambient_network/snapshots/test_sensor.ambr index 8637471cc60..ddf05c99b88 100644 --- a/tests/components/ambient_network/snapshots/test_sensor.ambr +++ b/tests/components/ambient_network/snapshots/test_sensor.ambr @@ -815,7 +815,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'config_subentry_id': , 'device_class': None, @@ -854,6 +856,7 @@ 'device_class': 'wind_direction', 'friendly_name': 'Station A Wind direction', 'last_measured': HAFakeDatetime(2023, 11, 8, 12, 12, 0, 914000, tzinfo=zoneinfo.ZoneInfo(key='US/Pacific')), + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -1800,7 +1803,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'config_subentry_id': , 'device_class': None, @@ -1839,6 +1844,7 @@ 'device_class': 'wind_direction', 'friendly_name': 'Station C Wind direction', 'last_measured': HAFakeDatetime(2024, 6, 6, 8, 28, 3, tzinfo=zoneinfo.ZoneInfo(key='US/Pacific')), + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -2722,7 +2728,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'config_subentry_id': , 'device_class': None, @@ -2760,6 +2768,7 @@ 'attribution': 'Data provided by ambientnetwork.net', 'device_class': 'wind_direction', 'friendly_name': 'Station D Wind direction', + 'state_class': , 'unit_of_measurement': '°', }), 'context': , diff --git a/tests/components/analytics_insights/fixtures/current_data.json b/tests/components/analytics_insights/fixtures/current_data.json index c652a8c0154..ff1baca49ed 100644 --- a/tests/components/analytics_insights/fixtures/current_data.json +++ b/tests/components/analytics_insights/fixtures/current_data.json @@ -1050,7 +1050,6 @@ "melnor": 42, "plaato": 45, "freedompro": 26, - "sunweg": 3, "logi_circle": 18, "proxy": 16, "statsd": 4, diff 
--git a/tests/components/apsystems/conftest.py b/tests/components/apsystems/conftest.py index 92af6885c0b..d1c97e991a8 100644 --- a/tests/components/apsystems/conftest.py +++ b/tests/components/apsystems/conftest.py @@ -43,6 +43,7 @@ def mock_apsystems() -> Generator[MagicMock]: ipAddr="127.0.01", minPower=0, maxPower=1000, + isBatterySystem=False, ) mock_api.get_output_data.return_value = ReturnOutputData( p1=2.0, diff --git a/tests/components/assist_pipeline/test_pipeline.py b/tests/components/assist_pipeline/test_pipeline.py index a7f6fbf7553..d67a0fd1726 100644 --- a/tests/components/assist_pipeline/test_pipeline.py +++ b/tests/components/assist_pipeline/test_pipeline.py @@ -684,7 +684,7 @@ def test_fallback_intent_filter() -> None: entities_list=[], ) ) - is True + is False ) assert ( _async_local_fallback_intent_filter( diff --git a/tests/components/assist_satellite/test_entity.py b/tests/components/assist_satellite/test_entity.py index 6604fdc3f25..2b1cc78943f 100644 --- a/tests/components/assist_satellite/test_entity.py +++ b/tests/components/assist_satellite/test_entity.py @@ -22,6 +22,7 @@ from homeassistant.components.assist_satellite import ( AssistSatelliteAnnouncement, SatelliteBusyError, ) +from homeassistant.components.assist_satellite.const import PREANNOUNCE_URL from homeassistant.components.assist_satellite.entity import AssistSatelliteState from homeassistant.components.media_source import PlayMedia from homeassistant.config_entries import ConfigEntry @@ -185,7 +186,7 @@ async def test_new_pipeline_cancels_pipeline( ("service_data", "expected_params"), [ ( - {"message": "Hello"}, + {"message": "Hello", "preannounce_media_id": None}, AssistSatelliteAnnouncement( message="Hello", media_id="http://10.10.10.10:8123/api/tts_proxy/test-token", @@ -198,6 +199,7 @@ async def test_new_pipeline_cancels_pipeline( { "message": "Hello", "media_id": "media-source://given", + "preannounce_media_id": None, }, AssistSatelliteAnnouncement( message="Hello", @@ -208,7 +210,7 @@ async def test_new_pipeline_cancels_pipeline( ), ), ( - {"media_id": "http://example.com/bla.mp3"}, + {"media_id": "http://example.com/bla.mp3", "preannounce_media_id": None}, AssistSatelliteAnnouncement( message="", media_id="http://example.com/bla.mp3", @@ -217,6 +219,20 @@ async def test_new_pipeline_cancels_pipeline( media_id_source="url", ), ), + ( + { + "media_id": "http://example.com/bla.mp3", + "preannounce_media_id": "http://example.com/preannounce.mp3", + }, + AssistSatelliteAnnouncement( + message="", + media_id="http://example.com/bla.mp3", + original_media_id="http://example.com/bla.mp3", + tts_token=None, + media_id_source="url", + preannounce_media_id="http://example.com/preannounce.mp3", + ), + ), ], ) async def test_announce( @@ -354,6 +370,24 @@ async def test_announce_cancels_pipeline( mock_async_announce.assert_called_once() +async def test_announce_default_preannounce( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test announcing on a device with the default preannouncement sound.""" + + async def async_announce(announcement): + assert announcement.preannounce_media_id.endswith(PREANNOUNCE_URL) + + with patch.object(entity, "async_announce", new=async_announce): + await hass.services.async_call( + "assist_satellite", + "announce", + {"media_id": "test-media-id"}, + target={"entity_id": "assist_satellite.test_entity"}, + blocking=True, + ) + + async def test_context_refresh( hass: HomeAssistant, init_components: ConfigEntry, entity: 
MockAssistSatellite ) -> None: @@ -507,6 +541,7 @@ async def test_vad_sensitivity_entity_not_found( { "start_message": "Hello", "extra_system_prompt": "Better system prompt", + "preannounce_media_id": None, }, ( "mock-conversation-id", @@ -524,6 +559,7 @@ async def test_vad_sensitivity_entity_not_found( { "start_message": "Hello", "start_media_id": "media-source://given", + "preannounce_media_id": None, }, ( "mock-conversation-id", @@ -538,7 +574,10 @@ async def test_vad_sensitivity_entity_not_found( ), ), ( - {"start_media_id": "http://example.com/given.mp3"}, + { + "start_media_id": "http://example.com/given.mp3", + "preannounce_media_id": None, + }, ( "mock-conversation-id", None, @@ -551,6 +590,24 @@ async def test_vad_sensitivity_entity_not_found( ), ), ), + ( + { + "start_media_id": "http://example.com/given.mp3", + "preannounce_media_id": "http://example.com/preannounce.mp3", + }, + ( + "mock-conversation-id", + None, + AssistSatelliteAnnouncement( + message="", + media_id="http://example.com/given.mp3", + tts_token=None, + original_media_id="http://example.com/given.mp3", + media_id_source="url", + preannounce_media_id="http://example.com/preannounce.mp3", + ), + ), + ), ], ) @pytest.mark.usefixtures("mock_chat_session_conversation_id") @@ -562,6 +619,13 @@ async def test_start_conversation( expected_params: tuple[str, str], ) -> None: """Test starting a conversation on a device.""" + original_start_conversation = entity.async_start_conversation + + async def async_start_conversation(start_announcement): + # Verify state change + assert entity.state == AssistSatelliteState.RESPONDING + await original_start_conversation(start_announcement) + await async_update_pipeline( hass, async_get_pipeline(hass), @@ -588,6 +652,7 @@ async def test_start_conversation( mime_type="audio/mp3", ), ), + patch.object(entity, "async_start_conversation", new=async_start_conversation), ): await hass.services.async_call( "assist_satellite", @@ -596,6 +661,7 @@ async def test_start_conversation( target={"entity_id": "assist_satellite.test_entity"}, blocking=True, ) + assert entity.state == AssistSatelliteState.IDLE assert entity.start_conversations[0] == expected_params @@ -616,6 +682,32 @@ async def test_start_conversation_reject_builtin_agent( ) +async def test_start_conversation_default_preannounce( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test starting a conversation on a device with the default preannouncement sound.""" + + async def async_start_conversation(start_announcement): + assert PREANNOUNCE_URL in start_announcement.preannounce_media_id + + await async_update_pipeline( + hass, + async_get_pipeline(hass), + conversation_engine="conversation.some_llm", + ) + + with ( + patch.object(entity, "async_start_conversation", new=async_start_conversation), + ): + await hass.services.async_call( + "assist_satellite", + "start_conversation", + {"start_media_id": "test-media-id"}, + target={"entity_id": "assist_satellite.test_entity"}, + blocking=True, + ) + + async def test_wake_word_start_keeps_responding( hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite ) -> None: diff --git a/tests/components/assist_satellite/test_websocket_api.py b/tests/components/assist_satellite/test_websocket_api.py index f0a8f02fc50..23eec7e8461 100644 --- a/tests/components/assist_satellite/test_websocket_api.py +++ b/tests/components/assist_satellite/test_websocket_api.py @@ -445,6 +445,7 @@ async def test_connection_test( assert 
len(entity.announcements) == 1 assert entity.announcements[0].message == "" + assert entity.announcements[0].preannounce_media_id is None announcement_media_id = entity.announcements[0].media_id hass_url = "http://10.10.10.10:8123" assert announcement_media_id.startswith( diff --git a/tests/components/awair/test_sensor.py b/tests/components/awair/test_sensor.py index 8c9cd6e3a24..040deaf8f80 100644 --- a/tests/components/awair/test_sensor.py +++ b/tests/components/awair/test_sensor.py @@ -127,7 +127,7 @@ async def test_awair_gen1_sensors( assert_expected_properties( hass, entity_registry, - "sensor.living_room_vocs", + "sensor.living_room_volatile_organic_compounds_parts", f"{AWAIR_UUID}_{SENSOR_TYPES_MAP[API_VOC].unique_id_tag}", "366", { diff --git a/tests/components/backup/common.py b/tests/components/backup/common.py index e6e4b2f8a50..3197cbfadeb 100644 --- a/tests/components/backup/common.py +++ b/tests/components/backup/common.py @@ -2,9 +2,9 @@ from __future__ import annotations -from collections.abc import AsyncIterator, Callable, Coroutine, Iterable +from collections.abc import AsyncIterator, Buffer, Callable, Coroutine, Iterable from pathlib import Path -from typing import Any +from typing import Any, cast from unittest.mock import AsyncMock, Mock, patch from homeassistant.components.backup import ( @@ -16,6 +16,7 @@ from homeassistant.components.backup import ( BackupNotFound, Folder, ) +from homeassistant.components.backup.backup import CoreLocalBackupAgent from homeassistant.components.backup.const import DATA_MANAGER from homeassistant.core import HomeAssistant from homeassistant.helpers.backup import async_initialize_backup @@ -69,7 +70,7 @@ def mock_backup_agent(name: str, backups: list[AgentBackup] | None = None) -> Mo async def delete_backup(backup_id: str, **kwargs: Any) -> None: """Mock delete.""" - get_backup(backup_id) + await get_backup(backup_id) async def download_backup(backup_id: str, **kwargs: Any) -> AsyncIterator[bytes]: """Mock download.""" @@ -77,7 +78,7 @@ def mock_backup_agent(name: str, backups: list[AgentBackup] | None = None) -> Mo async def get_backup(backup_id: str, **kwargs: Any) -> AgentBackup: """Get a backup.""" - backup = next((b for b in backups if b.backup_id == backup_id), None) + backup = next((b for b in _backups if b.backup_id == backup_id), None) if backup is None: raise BackupNotFound return backup @@ -89,15 +90,15 @@ def mock_backup_agent(name: str, backups: list[AgentBackup] | None = None) -> Mo **kwargs: Any, ) -> None: """Upload a backup.""" - backups.append(backup) + _backups.append(backup) backup_stream = await open_stream() backup_data = bytearray() async for chunk in backup_stream: backup_data += chunk backups_data[backup.backup_id] = backup_data - backups = backups or [] - backups_data: dict[str, bytes] = {} + _backups = backups or [] + backups_data: dict[str, Buffer] = {} mock_agent = Mock(spec=BackupAgent) mock_agent.domain = TEST_DOMAIN mock_agent.name = name @@ -113,7 +114,7 @@ def mock_backup_agent(name: str, backups: list[AgentBackup] | None = None) -> Mo side_effect=get_backup, spec_set=[BackupAgent.async_get_backup] ) mock_agent.async_list_backups = AsyncMock( - return_value=backups, spec_set=[BackupAgent.async_list_backups] + return_value=_backups, spec_set=[BackupAgent.async_list_backups] ) mock_agent.async_upload_backup = AsyncMock( side_effect=upload_backup, @@ -160,11 +161,18 @@ async def setup_backup_integration( if LOCAL_AGENT_ID not in backups or with_hassio: return remote_agents_dict - agent = 
hass.data[DATA_MANAGER].backup_agents[LOCAL_AGENT_ID] + local_agent = cast( + CoreLocalBackupAgent, hass.data[DATA_MANAGER].backup_agents[LOCAL_AGENT_ID] + ) for backup in backups[LOCAL_AGENT_ID]: - await agent.async_upload_backup(open_stream=None, backup=backup) - agent._loaded_backups = True + await local_agent.async_upload_backup( + open_stream=AsyncMock( + side_effect=RuntimeError("Local agent does not open stream") + ), + backup=backup, + ) + local_agent._loaded_backups = True return remote_agents_dict diff --git a/tests/components/backup/conftest.py b/tests/components/backup/conftest.py index eb38399eb79..d391df44475 100644 --- a/tests/components/backup/conftest.py +++ b/tests/components/backup/conftest.py @@ -61,24 +61,49 @@ def path_glob_fixture(hass: HomeAssistant) -> Generator[MagicMock]: CONFIG_DIR = { - "testing_config": [ + "tests/testing_config": [ Path("test.txt"), Path(".DS_Store"), Path(".storage"), + Path("another_subdir"), Path("backups"), Path("tmp_backups"), + Path("tts"), Path("home-assistant_v2.db"), ], - "backups": [ + "/backups": [ Path("backups/backup.tar"), Path("backups/not_backup"), ], - "tmp_backups": [ + "/another_subdir": [ + Path("another_subdir/.DS_Store"), + Path("another_subdir/backups"), + Path("another_subdir/tts"), + ], + "another_subdir/backups": [ + Path("another_subdir/backups/backup.tar"), + Path("another_subdir/backups/not_backup"), + ], + "another_subdir/tts": [ + Path("another_subdir/tts/voice.mp3"), + ], + "/tmp_backups": [ # noqa: S108 Path("tmp_backups/forgotten_backup.tar"), Path("tmp_backups/not_backup"), ], + "/tts": [ + Path("tts/voice.mp3"), + ], +} +CONFIG_DIR_DIRS = { + Path(".storage"), + Path("another_subdir"), + Path("another_subdir/backups"), + Path("another_subdir/tts"), + Path("backups"), + Path("tmp_backups"), + Path("tts"), } -CONFIG_DIR_DIRS = {Path(".storage"), Path("backups"), Path("tmp_backups")} @pytest.fixture(name="create_backup") @@ -105,7 +130,10 @@ def mock_backup_generation_fixture( """Mock backup generator.""" with ( - patch("pathlib.Path.iterdir", lambda x: CONFIG_DIR.get(x.name, [])), + patch( + "pathlib.Path.iterdir", + lambda x: CONFIG_DIR.get(f"{x.parent.name}/{x.name}", []), + ), patch("pathlib.Path.stat", return_value=MagicMock(st_size=123)), patch("pathlib.Path.is_file", lambda x: x not in CONFIG_DIR_DIRS), patch("pathlib.Path.is_dir", lambda x: x in CONFIG_DIR_DIRS), diff --git a/tests/components/backup/snapshots/test_diagnostics.ambr b/tests/components/backup/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..cf412970204 --- /dev/null +++ b/tests/components/backup/snapshots/test_diagnostics.ambr @@ -0,0 +1,39 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'backup_agents': list([ + dict({ + 'agent_id': 'backup.local', + 'name': 'local', + }), + ]), + 'backup_config': dict({ + 'agents': dict({ + }), + 'automatic_backups_configured': False, + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'days': list([ + ]), + 'recurrence': 'never', + 'state': 'never', + 'time': None, + }), + }), + }) +# --- diff --git a/tests/components/backup/snapshots/test_sensors.ambr b/tests/components/backup/snapshots/test_sensors.ambr new file mode 100644 index 
00000000000..be12afdbf1e --- /dev/null +++ b/tests/components/backup/snapshots/test_sensors.ambr @@ -0,0 +1,160 @@ +# serializer version: 1 +# name: test_sensors[sensor.backup_backup_manager_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'idle', + 'create_backup', + 'blocked', + 'receive_backup', + 'restore_backup', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.backup_backup_manager_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Backup Manager state', + 'platform': 'backup', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'backup_manager_state', + 'unique_id': 'backup_manager_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.backup_backup_manager_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Backup Backup Manager state', + 'options': list([ + 'idle', + 'create_backup', + 'blocked', + 'receive_backup', + 'restore_backup', + ]), + }), + 'context': , + 'entity_id': 'sensor.backup_backup_manager_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_sensors[sensor.backup_last_successful_automatic_backup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.backup_last_successful_automatic_backup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last successful automatic backup', + 'platform': 'backup', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_successful_automatic_backup', + 'unique_id': 'last_successful_automatic_backup', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.backup_last_successful_automatic_backup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Backup Last successful automatic backup', + }), + 'context': , + 'entity_id': 'sensor.backup_last_successful_automatic_backup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[sensor.backup_next_scheduled_automatic_backup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.backup_next_scheduled_automatic_backup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Next scheduled automatic backup', + 'platform': 'backup', + 'previous_unique_id': None, + 'supported_features': 0, 
+ 'translation_key': 'next_scheduled_automatic_backup', + 'unique_id': 'next_scheduled_automatic_backup', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.backup_next_scheduled_automatic_backup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Backup Next scheduled automatic backup', + }), + 'context': , + 'entity_id': 'sensor.backup_next_scheduled_automatic_backup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/backup/test_diagnostics.py b/tests/components/backup/test_diagnostics.py new file mode 100644 index 00000000000..a66b4a9a2ea --- /dev/null +++ b/tests/components/backup/test_diagnostics.py @@ -0,0 +1,26 @@ +"""Tests the diagnostics for Home Assistant Backup integration.""" + +from syrupy import SnapshotAssertion + +from homeassistant.components.backup.const import DOMAIN +from homeassistant.core import HomeAssistant + +from .common import setup_backup_integration + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + await setup_backup_integration(hass, with_hassio=False) + await hass.async_block_till_done(wait_background_tasks=True) + + entry = hass.config_entries.async_entries(DOMAIN)[0] + diag_data = await get_diagnostics_for_config_entry(hass, hass_client, entry) + + assert diag_data == snapshot diff --git a/tests/components/backup/test_init.py b/tests/components/backup/test_init.py index 8a0cc2b97c0..10bd2d8b97a 100644 --- a/tests/components/backup/test_init.py +++ b/tests/components/backup/test_init.py @@ -6,11 +6,13 @@ from unittest.mock import patch import pytest from homeassistant.components.backup.const import DATA_MANAGER, DOMAIN +from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceNotFound from .common import setup_backup_integration +from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator @@ -141,3 +143,17 @@ async def test_create_automatic_service( ) generate_backup.assert_called_once_with(**expected_kwargs) + + +async def test_setup_entry( + hass: HomeAssistant, +) -> None: + """Test setup backup config entry.""" + await setup_backup_integration(hass, with_hassio=False) + entry = MockConfigEntry(domain=DOMAIN, source=SOURCE_SYSTEM) + entry.add_to_hass(hass) + + with patch("homeassistant.components.backup.PLATFORMS", return_value=[]): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.LOADED diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index fef4b84ac61..04072dae864 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -68,10 +68,17 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator _EXPECTED_FILES = [ "test.txt", ".storage", + "another_subdir", + "another_subdir/backups", + "another_subdir/backups/backup.tar", + "another_subdir/backups/not_backup", + "another_subdir/tts", + "another_subdir/tts/voice.mp3", "backups", "backups/not_backup", "tmp_backups", "tmp_backups/not_backup", + "tts", ] _EXPECTED_FILES_WITH_DATABASE = { True: 
[*_EXPECTED_FILES, "home-assistant_v2.db"], diff --git a/tests/components/backup/test_sensors.py b/tests/components/backup/test_sensors.py new file mode 100644 index 00000000000..bee61887ea5 --- /dev/null +++ b/tests/components/backup/test_sensors.py @@ -0,0 +1,119 @@ +"""Tests for the sensors of the Backup integration.""" + +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.backup import store +from homeassistant.components.backup.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .common import setup_backup_integration + +from tests.common import async_fire_time_changed, snapshot_platform +from tests.typing import WebSocketGenerator + + +@pytest.mark.usefixtures("mock_backup_generation") +async def test_sensors( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test setup of backup sensors.""" + with patch("homeassistant.components.backup.PLATFORMS", [Platform.SENSOR]): + await setup_backup_integration(hass, with_hassio=False) + await hass.async_block_till_done(wait_background_tasks=True) + + entry = hass.config_entries.async_entries(DOMAIN)[0] + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + # start backup and check sensor states again + client = await hass_ws_client(hass) + await hass.async_block_till_done() + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["backup.local"]} + ) + + assert await client.receive_json() + state = hass.states.get("sensor.backup_backup_manager_state") + assert state.state == "create_backup" + + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("sensor.backup_backup_manager_state") + assert state.state == "idle" + + +async def test_sensor_updates( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], + create_backup: AsyncMock, +) -> None: + """Test update of backup sensors.""" + # Ensure created backup is already protected, + # to avoid manager creating a new EncryptedBackupStreamer + # instead of using the already mocked stream writer. 
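+ # Editorial note (not part of the original patch): the remainder of this test
+ # seeds hass_storage with a daily 06:00 (Europe/Amsterdam) schedule and a last
+ # completed automatic backup at 2024-11-11 04:45 local time, freezes the clock
+ # at 2024-11-12 12:00 local time, and checks that the timestamp sensors expose
+ # both values converted to UTC (last: 2024-11-11T03:45:00+00:00,
+ # next: 2024-11-13T05:00:00+00:00). Advancing the frozen clock past the next
+ # scheduled run lets the mocked automatic backup execute, after which both
+ # sensors move forward again.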
+ created_backup: MagicMock = create_backup.return_value[1].result().backup + created_backup.protected = True + + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to("2024-11-12T12:00:00+01:00") + storage_data = { + "backups": [], + "config": { + "agents": {}, + "automatic_backups_configured": True, + "create_backup": { + "agent_ids": ["test.remote"], + "include_addons": [], + "include_all_addons": False, + "include_database": True, + "include_folders": [], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": None, "days": None}, + "last_attempted_automatic_backup": "2024-11-11T04:45:00+01:00", + "last_completed_automatic_backup": "2024-11-11T04:45:00+01:00", + "schedule": { + "days": [], + "recurrence": "daily", + "state": "never", + "time": "06:00", + }, + }, + } + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": store.STORAGE_VERSION, + "minor_version": store.STORAGE_VERSION_MINOR, + } + + with patch("homeassistant.components.backup.PLATFORMS", [Platform.SENSOR]): + await setup_backup_integration( + hass, with_hassio=False, remote_agents=["test.remote"] + ) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("sensor.backup_last_successful_automatic_backup") + assert state.state == "2024-11-11T03:45:00+00:00" + state = hass.states.get("sensor.backup_next_scheduled_automatic_backup") + assert state.state == "2024-11-13T05:00:00+00:00" + + freezer.move_to("2024-11-13T12:00:00+01:00") + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("sensor.backup_last_successful_automatic_backup") + assert state.state == "2024-11-13T11:00:00+00:00" + state = hass.states.get("sensor.backup_next_scheduled_automatic_backup") + assert state.state == "2024-11-14T05:00:00+00:00" diff --git a/tests/components/bmw_connected_drive/snapshots/test_select.ambr b/tests/components/bmw_connected_drive/snapshots/test_select.ambr index de76b07057e..0edead03f26 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_select.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_select.ambr @@ -30,7 +30,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Charging Mode', + 'original_name': 'Charging mode', 'platform': 'bmw_connected_drive', 'previous_unique_id': None, 'supported_features': 0, @@ -42,7 +42,7 @@ # name: test_entity_state_attrs[select.i3_rex_charging_mode-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'i3 (+ REX) Charging Mode', + 'friendly_name': 'i3 (+ REX) Charging mode', 'options': list([ 'immediate_charging', 'delayed_charging', @@ -98,7 +98,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'AC Charging Limit', + 'original_name': 'AC charging limit', 'platform': 'bmw_connected_drive', 'previous_unique_id': None, 'supported_features': 0, @@ -110,7 +110,7 @@ # name: test_entity_state_attrs[select.i4_edrive40_ac_charging_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'i4 eDrive40 AC Charging Limit', + 'friendly_name': 'i4 eDrive40 AC charging limit', 'options': list([ '6', '7', @@ -167,7 +167,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Charging Mode', + 'original_name': 'Charging mode', 'platform': 'bmw_connected_drive', 'previous_unique_id': None, 'supported_features': 0, @@ -179,7 +179,7 @@ # name: test_entity_state_attrs[select.i4_edrive40_charging_mode-state] 
StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'i4 eDrive40 Charging Mode', + 'friendly_name': 'i4 eDrive40 Charging mode', 'options': list([ 'immediate_charging', 'delayed_charging', @@ -235,7 +235,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'AC Charging Limit', + 'original_name': 'AC charging limit', 'platform': 'bmw_connected_drive', 'previous_unique_id': None, 'supported_features': 0, @@ -247,7 +247,7 @@ # name: test_entity_state_attrs[select.ix_xdrive50_ac_charging_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'iX xDrive50 AC Charging Limit', + 'friendly_name': 'iX xDrive50 AC charging limit', 'options': list([ '6', '7', @@ -304,7 +304,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Charging Mode', + 'original_name': 'Charging mode', 'platform': 'bmw_connected_drive', 'previous_unique_id': None, 'supported_features': 0, @@ -316,7 +316,7 @@ # name: test_entity_state_attrs[select.ix_xdrive50_charging_mode-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'iX xDrive50 Charging Mode', + 'friendly_name': 'iX xDrive50 Charging mode', 'options': list([ 'immediate_charging', 'delayed_charging', diff --git a/tests/components/bosch_alarm/__init__.py b/tests/components/bosch_alarm/__init__.py new file mode 100644 index 00000000000..2b2d94cf1e5 --- /dev/null +++ b/tests/components/bosch_alarm/__init__.py @@ -0,0 +1,22 @@ +"""Tests for the Bosch Alarm component.""" + +from unittest.mock import AsyncMock + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +async def call_observable(hass: HomeAssistant, observable: AsyncMock) -> None: + """Call the observable with the given event.""" + for callback in observable.attach.call_args_list: + callback[0][0]() + await hass.async_block_till_done() diff --git a/tests/components/bosch_alarm/conftest.py b/tests/components/bosch_alarm/conftest.py new file mode 100644 index 00000000000..45ec0072a37 --- /dev/null +++ b/tests/components/bosch_alarm/conftest.py @@ -0,0 +1,131 @@ +"""Define fixtures for Bosch Alarm tests.""" + +from collections.abc import Generator +from typing import Any +from unittest.mock import AsyncMock, patch + +from bosch_alarm_mode2.panel import Area +from bosch_alarm_mode2.utils import Observable +import pytest + +from homeassistant.components.bosch_alarm.const import ( + CONF_INSTALLER_CODE, + CONF_USER_CODE, + DOMAIN, +) +from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_PASSWORD, CONF_PORT + +from tests.common import MockConfigEntry + + +@pytest.fixture( + params=[ + "solution_3000", + "amax_3000", + "b5512", + ] +) +def model(request: pytest.FixtureRequest) -> Generator[str]: + """Return every device.""" + return request.param + + +@pytest.fixture +def extra_config_entry_data( + model: str, model_name: str, config_flow_data: dict[str, Any] +) -> dict[str, Any]: + """Return extra config entry data.""" + return {CONF_MODEL: model_name} | config_flow_data + + +@pytest.fixture +def config_flow_data(model: str) -> dict[str, Any]: + """Return extra config entry data.""" + if model == "solution_3000": + return {CONF_USER_CODE: "1234"} + if model == "amax_3000": + return 
{CONF_INSTALLER_CODE: "1234", CONF_PASSWORD: "1234567890"} + if model == "b5512": + return {CONF_PASSWORD: "1234567890"} + pytest.fail("Invalid model") + + +@pytest.fixture +def model_name(model: str) -> str | None: + """Return extra config entry data.""" + return { + "solution_3000": "Solution 3000", + "amax_3000": "AMAX 3000", + "b5512": "B5512 (US1B)", + }.get(model) + + +@pytest.fixture +def serial_number(model: str) -> str | None: + """Return extra config entry data.""" + if model == "solution_3000": + return "1234567890" + return None + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.bosch_alarm.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def area() -> Generator[Area]: + """Define a mocked area.""" + mock = AsyncMock(spec=Area) + mock.name = "Area1" + mock.status_observer = AsyncMock(spec=Observable) + mock.is_triggered.return_value = False + mock.is_disarmed.return_value = True + mock.is_arming.return_value = False + mock.is_pending.return_value = False + mock.is_part_armed.return_value = False + mock.is_all_armed.return_value = False + return mock + + +@pytest.fixture +def mock_panel( + area: AsyncMock, model_name: str, serial_number: str | None +) -> Generator[AsyncMock]: + """Define a fixture to set up Bosch Alarm.""" + with ( + patch( + "homeassistant.components.bosch_alarm.Panel", autospec=True + ) as mock_panel, + patch("homeassistant.components.bosch_alarm.config_flow.Panel", new=mock_panel), + ): + client = mock_panel.return_value + client.areas = {1: area} + client.model = model_name + client.firmware_version = "1.0.0" + client.serial_number = serial_number + client.connection_status_observer = AsyncMock(spec=Observable) + yield client + + +@pytest.fixture +def mock_config_entry( + extra_config_entry_data: dict[str, Any], serial_number: str | None +) -> MockConfigEntry: + """Mock config entry for bosch alarm.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id=serial_number, + entry_id="01JQ917ACKQ33HHM7YCFXYZX51", + data={ + CONF_HOST: "0.0.0.0", + CONF_PORT: 7700, + CONF_MODEL: "bosch_alarm_test_data.model", + } + | extra_config_entry_data, + ) diff --git a/tests/components/bosch_alarm/snapshots/test_alarm_control_panel.ambr b/tests/components/bosch_alarm/snapshots/test_alarm_control_panel.ambr new file mode 100644 index 00000000000..76568cef56c --- /dev/null +++ b/tests/components/bosch_alarm/snapshots/test_alarm_control_panel.ambr @@ -0,0 +1,154 @@ +# serializer version: 1 +# name: test_alarm_control_panel[amax_3000][alarm_control_panel.area1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.area1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bosch_alarm', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01JQ917ACKQ33HHM7YCFXYZX51_area_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_alarm_control_panel[amax_3000][alarm_control_panel.area1-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'changed_by': None, + 'code_arm_required': False, + 'code_format': None, + 'friendly_name': 'Area1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'alarm_control_panel.area1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disarmed', + }) +# --- +# name: test_alarm_control_panel[b5512][alarm_control_panel.area1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.area1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bosch_alarm', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01JQ917ACKQ33HHM7YCFXYZX51_area_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_alarm_control_panel[b5512][alarm_control_panel.area1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'changed_by': None, + 'code_arm_required': False, + 'code_format': None, + 'friendly_name': 'Area1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'alarm_control_panel.area1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disarmed', + }) +# --- +# name: test_alarm_control_panel[solution_3000][alarm_control_panel.area1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.area1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bosch_alarm', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '1234567890_area_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_alarm_control_panel[solution_3000][alarm_control_panel.area1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'changed_by': None, + 'code_arm_required': False, + 'code_format': None, + 'friendly_name': 'Area1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'alarm_control_panel.area1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disarmed', + }) +# --- diff --git a/tests/components/bosch_alarm/test_alarm_control_panel.py b/tests/components/bosch_alarm/test_alarm_control_panel.py new file mode 100644 index 00000000000..31d2f928ec5 --- /dev/null +++ b/tests/components/bosch_alarm/test_alarm_control_panel.py @@ -0,0 +1,145 @@ +"""Tests for Bosch Alarm component.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, + AlarmControlPanelState, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_ALARM_ARM_AWAY, + SERVICE_ALARM_ARM_HOME, + SERVICE_ALARM_DISARM, + STATE_UNAVAILABLE, + Platform, +) +from 
homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import call_observable, setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.fixture(autouse=True) +async def platforms() -> AsyncGenerator[None]: + """Return the platforms to be loaded for this test.""" + with patch( + "homeassistant.components.bosch_alarm.PLATFORMS", [Platform.ALARM_CONTROL_PANEL] + ): + yield + + +async def test_update_alarm_device( + hass: HomeAssistant, + mock_panel: AsyncMock, + area: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that alarm panel state changes after arming the panel.""" + await setup_integration(hass, mock_config_entry) + entity_id = "alarm_control_panel.area1" + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + + area.is_arming.return_value = True + area.is_disarmed.return_value = False + + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_AWAY, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMING + + area.is_arming.return_value = False + area.is_all_armed.return_value = True + + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + area.is_all_armed.return_value = False + area.is_disarmed.return_value = True + + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_HOME, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + area.is_disarmed.return_value = False + area.is_arming.return_value = True + + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMING + + area.is_arming.return_value = False + area.is_part_armed.return_value = True + + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_HOME + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + area.is_part_armed.return_value = False + area.is_disarmed.return_value = True + + await call_observable(hass, area.status_observer) + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + + +async def test_alarm_control_panel( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_panel: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the alarm_control_panel state.""" + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_alarm_control_panel_availability( + hass: HomeAssistant, + mock_panel: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the alarm_control_panel availability.""" + await setup_integration(hass, mock_config_entry) + + assert ( + hass.states.get("alarm_control_panel.area1").state + == AlarmControlPanelState.DISARMED + ) + + mock_panel.connection_status.return_value = False + + await call_observable(hass, 
mock_panel.connection_status_observer) + + assert hass.states.get("alarm_control_panel.area1").state == STATE_UNAVAILABLE diff --git a/tests/components/bosch_alarm/test_config_flow.py b/tests/components/bosch_alarm/test_config_flow.py new file mode 100644 index 00000000000..066b3008821 --- /dev/null +++ b/tests/components/bosch_alarm/test_config_flow.py @@ -0,0 +1,212 @@ +"""Tests for the bosch_alarm config flow.""" + +import asyncio +from typing import Any +from unittest.mock import AsyncMock + +import pytest + +from homeassistant import config_entries +from homeassistant.components.bosch_alarm.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_PORT +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form_user( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_panel: AsyncMock, + model_name: str, + serial_number: str, + config_flow_data: dict[str, Any], +) -> None: + """Test the config flow for bosch_alarm.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "1.1.1.1", CONF_PORT: 7700}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + config_flow_data, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == f"Bosch {model_name}" + assert ( + result["data"] + == { + CONF_HOST: "1.1.1.1", + CONF_PORT: 7700, + CONF_MODEL: model_name, + } + | config_flow_data + ) + assert result["result"].unique_id == serial_number + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "message"), + [ + (asyncio.TimeoutError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_form_exceptions( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_panel: AsyncMock, + config_flow_data: dict[str, Any], + exception: Exception, + message: str, +) -> None: + """Test we handle exceptions correctly.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + mock_panel.connect.side_effect = exception + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "1.1.1.1", CONF_PORT: 7700}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": message} + + mock_panel.connect.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "1.1.1.1", CONF_PORT: 7700}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + config_flow_data, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize( + ("exception", "message"), + [ + (PermissionError, "invalid_auth"), + (asyncio.TimeoutError, "cannot_connect"), + 
(Exception, "unknown"), + ], +) +async def test_form_exceptions_user( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_panel: AsyncMock, + config_flow_data: dict[str, Any], + exception: Exception, + message: str, +) -> None: + """Test we handle exceptions correctly.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "1.1.1.1", CONF_PORT: 7700}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + mock_panel.connect.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], config_flow_data + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {"base": message} + + mock_panel.connect.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], config_flow_data + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize("model", ["solution_3000", "amax_3000"]) +async def test_entry_already_configured_host( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_panel: AsyncMock, + config_flow_data: dict[str, Any], +) -> None: + """Test if configuring an entity twice results in an error.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: "0.0.0.0"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], config_flow_data + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize("model", ["b5512"]) +async def test_entry_already_configured_serial( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_panel: AsyncMock, + config_flow_data: dict[str, Any], +) -> None: + """Test if configuring an entity twice results in an error.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: "0.0.0.0"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], config_flow_data + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/bosch_alarm/test_init.py b/tests/components/bosch_alarm/test_init.py new file mode 100644 index 00000000000..0497a91eadf --- /dev/null +++ b/tests/components/bosch_alarm/test_init.py @@ -0,0 +1,33 @@ +"""Tests for bosch alarm integration init.""" + +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.fixture(autouse=True) +def disable_platform_only(): + """Disable platforms to speed up tests.""" + with patch("homeassistant.components.bosch_alarm.PLATFORMS", []): + yield + + +@pytest.mark.parametrize("model", ["solution_3000"]) +@pytest.mark.parametrize("exception", [PermissionError(), TimeoutError()]) +async def test_incorrect_auth( + hass: HomeAssistant, + mock_panel: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, +) -> None: + """Test errors with incorrect auth.""" + mock_panel.connect.side_effect = exception + await setup_integration(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/cambridge_audio/test_media_player.py b/tests/components/cambridge_audio/test_media_player.py index bb2ccd1aec4..ef7e911fbba 100644 --- a/tests/components/cambridge_audio/test_media_player.py +++ b/tests/components/cambridge_audio/test_media_player.py @@ -10,6 +10,7 @@ from aiostreammagic import ( import pytest from homeassistant.components.media_player import ( + ATTR_MEDIA_ARTIST, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_REPEAT, @@ -489,3 +490,41 @@ async def test_play_media_unknown_type( }, blocking=True, ) + + +@pytest.mark.parametrize( + ("source_id", "artist", "station", "display"), + [ + ("MEDIA_PLAYER", "Metallica", None, "Metallica"), + ("USB_AUDIO", "Iron Maiden", "Radio BOB!", "Iron Maiden"), + ("IR", "In Flames", "Radio BOB!", "In Flames"), + ("IR", None, "Radio BOB!", "Radio BOB!"), + ("IR", None, None, None), + ("MEDIA_PLAYER", None, "Radio BOB!", None), + ], +) +async def test_media_artist( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, + source_id: str, + artist: str, + station: str, + display: str, +) -> None: + """Test media player state.""" + await setup_integration(hass, mock_config_entry) + mock_stream_magic_client.play_state.metadata.artist = artist + mock_stream_magic_client.play_state.metadata.station = station + mock_stream_magic_client.state.source = source_id + + await mock_state_update(mock_stream_magic_client) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + if (artist is None and source_id != "IR") or ( + source_id == "IR" and station is None + ): + assert ATTR_MEDIA_ARTIST not in state.attributes + else: + assert state.attributes[ATTR_MEDIA_ARTIST] == display diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index dd6252c4d62..8399e69ab09 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -5,9 +5,9 @@ from io import StringIO from typing import Any from unittest.mock import ANY, Mock, PropertyMock, patch -from aiohttp import ClientError +from aiohttp import ClientError, ClientResponseError from hass_nabucasa import CloudError -from hass_nabucasa.api import CloudApiNonRetryableError +from hass_nabucasa.api import CloudApiError, CloudApiNonRetryableError from hass_nabucasa.files import FilesError, StorageType import pytest @@ -547,6 +547,120 @@ async def test_agents_upload_not_protected( assert stored_backup["failed_agent_ids"] == ["cloud.cloud"] +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_upload_not_subscribed( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_storage: dict[str, Any], + cloud: Mock, +) -> None: + """Test upload backup when cloud user is not 
subscribed.""" + cloud.subscription_expired = True + client = await hass_client() + backup_data = "test" + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + extra_metadata={}, + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=len(backup_data), + ) + + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[backup_data.encode(), b""]) + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO(backup_data)}, + ) + await hass.async_block_till_done() + + assert resp.status == 201 + assert cloud.files.upload.call_count == 0 + store_backups = hass_storage[BACKUP_DOMAIN]["data"]["backups"] + assert len(store_backups) == 1 + stored_backup = store_backups[0] + assert stored_backup["backup_id"] == backup_id + assert stored_backup["failed_agent_ids"] == ["cloud.cloud"] + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_upload_not_subscribed_midway( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_storage: dict[str, Any], + cloud: Mock, +) -> None: + """Test upload backup when cloud subscription expires during the call.""" + client = await hass_client() + backup_data = "test" + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + extra_metadata={}, + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=len(backup_data), + ) + + async def mock_upload(*args: Any, **kwargs: Any) -> None: + """Mock file upload.""" + cloud.subscription_expired = True + raise CloudApiError( + "Boom!", orig_exc=ClientResponseError(Mock(), Mock(), status=403) + ) + + cloud.files.upload.side_effect = mock_upload + + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[backup_data.encode(), b""]) + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO(backup_data)}, + ) + await hass.async_block_till_done() + + assert resp.status == 201 + assert cloud.files.upload.call_count == 1 + store_backups = hass_storage[BACKUP_DOMAIN]["data"]["backups"] + assert len(store_backups) == 1 + stored_backup = store_backups[0] + assert stored_backup["backup_id"] == backup_id + assert stored_backup["failed_agent_ids"] == ["cloud.cloud"] + + @pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") async def test_agents_upload_wrong_size( hass: HomeAssistant, diff --git a/tests/components/comelit/conftest.py b/tests/components/comelit/conftest.py index d2d450ccb8d..1e5e85cd26e 100644 --- 
a/tests/components/comelit/conftest.py +++ b/tests/components/comelit/conftest.py @@ -1,5 +1,7 @@ """Configure tests for Comelit SimpleHome.""" +from copy import deepcopy + import pytest from homeassistant.components.comelit.const import ( @@ -47,10 +49,10 @@ def mock_serial_bridge() -> Generator[AsyncMock]: ), ): bridge = mock_comelit_serial_bridge.return_value - bridge.get_all_devices.return_value = BRIDGE_DEVICE_QUERY + bridge.get_all_devices.return_value = deepcopy(BRIDGE_DEVICE_QUERY) bridge.host = BRIDGE_HOST bridge.port = BRIDGE_PORT - bridge.pin = BRIDGE_PIN + bridge.device_pin = BRIDGE_PIN yield bridge @@ -65,6 +67,7 @@ def mock_serial_bridge_config_entry() -> Generator[MockConfigEntry]: CONF_PIN: BRIDGE_PIN, CONF_TYPE: BRIDGE, }, + entry_id="serial_bridge_config_entry_id", ) @@ -82,10 +85,10 @@ def mock_vedo() -> Generator[AsyncMock]: ), ): vedo = mock_comelit_vedo.return_value - vedo.get_all_areas_and_zones.return_value = VEDO_DEVICE_QUERY + vedo.get_all_areas_and_zones.return_value = deepcopy(VEDO_DEVICE_QUERY) vedo.host = VEDO_HOST vedo.port = VEDO_PORT - vedo.pin = VEDO_PIN + vedo.device_pin = VEDO_PIN vedo.type = VEDO yield vedo @@ -101,4 +104,5 @@ def mock_vedo_config_entry() -> Generator[MockConfigEntry]: CONF_PIN: VEDO_PIN, CONF_TYPE: VEDO, }, + entry_id="vedo_config_entry_id", ) diff --git a/tests/components/comelit/const.py b/tests/components/comelit/const.py index f353ec97628..d06e6cfd8cb 100644 --- a/tests/components/comelit/const.py +++ b/tests/components/comelit/const.py @@ -29,13 +29,30 @@ VEDO_PIN = 5678 FAKE_PIN = 0000 BRIDGE_DEVICE_QUERY = { - CLIMATE: {}, + CLIMATE: { + 0: ComelitSerialBridgeObject( + index=0, + name="Climate0", + status=0, + human_status="off", + type="climate", + val=[ + [221, 0, "U", "M", 50, 0, 0, "U"], + [650, 0, "O", "M", 500, 0, 0, "N"], + [0, 0], + ], + protected=0, + zone="Living room", + power=0.0, + power_unit=WATT, + ), + }, COVER: { 0: ComelitSerialBridgeObject( index=0, name="Cover0", status=0, - human_status="closed", + human_status="stopped", type="cover", val=0, protected=0, @@ -58,7 +75,20 @@ BRIDGE_DEVICE_QUERY = { power_unit=WATT, ) }, - OTHER: {}, + OTHER: { + 0: ComelitSerialBridgeObject( + index=0, + name="Switch0", + status=0, + human_status="off", + type="other", + val=0, + protected=0, + zone="Bathroom", + power=0.0, + power_unit=WATT, + ), + }, IRRIGATION: {}, SCENARIO: {}, } @@ -69,16 +99,16 @@ VEDO_DEVICE_QUERY = AlarmDataObject( index=0, name="Area0", p1=True, - p2=False, + p2=True, ready=False, - armed=False, + armed=0, alarm=False, alarm_memory=False, sabotage=False, anomaly=False, in_time=False, out_time=False, - human_status=AlarmAreaState.UNKNOWN, + human_status=AlarmAreaState.DISARMED, ) }, alarm_zones={ diff --git a/tests/components/comelit/snapshots/test_climate.ambr b/tests/components/comelit/snapshots/test_climate.ambr new file mode 100644 index 00000000000..e5201067ee1 --- /dev/null +++ b/tests/components/comelit/snapshots/test_climate.ambr @@ -0,0 +1,71 @@ +# serializer version: 1 +# name: test_all_entities[climate.climate0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 30, + 'min_temp': 5, + 'target_temp_step': 0.1, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.climate0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'comelit', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'serial_bridge_config_entry_id-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[climate.climate0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22.1, + 'friendly_name': 'Climate0', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 30, + 'min_temp': 5, + 'supported_features': , + 'target_temp_step': 0.1, + 'temperature': 5.0, + }), + 'context': , + 'entity_id': 'climate.climate0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- diff --git a/tests/components/comelit/snapshots/test_cover.ambr b/tests/components/comelit/snapshots/test_cover.ambr new file mode 100644 index 00000000000..17189344cd1 --- /dev/null +++ b/tests/components/comelit/snapshots/test_cover.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_all_entities[cover.cover0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.cover0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'comelit', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'serial_bridge_config_entry_id-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[cover.cover0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'shutter', + 'friendly_name': 'Cover0', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.cover0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/comelit/snapshots/test_diagnostics.ambr b/tests/components/comelit/snapshots/test_diagnostics.ambr index c4544f38f52..51ea646df9f 100644 --- a/tests/components/comelit/snapshots/test_diagnostics.ambr +++ b/tests/components/comelit/snapshots/test_diagnostics.ambr @@ -5,13 +5,50 @@ 'devices': list([ dict({ 'clima': list([ + dict({ + '0': dict({ + 'human_status': 'off', + 'name': 'Climate0', + 'power': 0.0, + 'power_unit': 'W', + 'protected': 0, + 'status': 0, + 'val': list([ + list([ + 221, + 0, + 'U', + 'M', + 50, + 0, + 0, + 'U', + ]), + list([ + 650, + 0, + 'O', + 'M', + 500, + 0, + 0, + 'N', + ]), + list([ + 0, + 0, + ]), + ]), + 'zone': 'Living room', + }), + }), ]), }), dict({ 'shutter': list([ dict({ '0': dict({ - 'human_status': 'closed', + 'human_status': 'stopped', 'name': 'Cover0', 'power': 0.0, 'power_unit': 'W', @@ -41,6 +78,18 @@ }), dict({ 'other': list([ + dict({ + '0': dict({ + 'human_status': 'off', + 'name': 'Switch0', + 'power': 0.0, + 'power_unit': 'W', + 'protected': 0, + 'status': 0, + 'val': 0, + 'zone': 'Bathroom', + }), + }), ]), }), dict({ @@ -92,13 +141,13 @@ 'alarm': False, 'alarm_memory': False, 'anomaly': False, - 'armed': False, - 'human_status': 'unknown', + 'armed': 0, + 'human_status': 'disarmed', 'in_time': False, 'name': 'Area0', 'out_time': False, 'p1': 
True, - 'p2': False, + 'p2': True, 'ready': False, 'sabotage': False, }), diff --git a/tests/components/comelit/snapshots/test_light.ambr b/tests/components/comelit/snapshots/test_light.ambr new file mode 100644 index 00000000000..c60c962e23d --- /dev/null +++ b/tests/components/comelit/snapshots/test_light.ambr @@ -0,0 +1,57 @@ +# serializer version: 1 +# name: test_all_entities[light.light0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.light0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'comelit', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'serial_bridge_config_entry_id-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[light.light0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': None, + 'friendly_name': 'Light0', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.light0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/comelit/snapshots/test_sensor.ambr b/tests/components/comelit/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..dabae2a1bf0 --- /dev/null +++ b/tests/components/comelit/snapshots/test_sensor.ambr @@ -0,0 +1,76 @@ +# serializer version: 1 +# name: test_all_entities[sensor.zone0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'alarm', + 'armed', + 'open', + 'excluded', + 'faulty', + 'inhibited', + 'isolated', + 'rest', + 'sabotated', + 'unavailable', + 'unknown', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.zone0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'comelit', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'zone_status', + 'unique_id': 'vedo_config_entry_id-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.zone0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Zone0', + 'options': list([ + 'alarm', + 'armed', + 'open', + 'excluded', + 'faulty', + 'inhibited', + 'isolated', + 'rest', + 'sabotated', + 'unavailable', + 'unknown', + ]), + }), + 'context': , + 'entity_id': 'sensor.zone0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'rest', + }) +# --- diff --git a/tests/components/comelit/snapshots/test_switch.ambr b/tests/components/comelit/snapshots/test_switch.ambr new file mode 100644 index 00000000000..eddecfabb7a --- /dev/null +++ b/tests/components/comelit/snapshots/test_switch.ambr @@ -0,0 +1,49 @@ +# serializer version: 1 +# name: test_all_entities[switch.switch0-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'comelit', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'serial_bridge_config_entry_id-other-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.switch0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Switch0', + }), + 'context': , + 'entity_id': 'switch.switch0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/comelit/test_alarm_control_panel.py b/tests/components/comelit/test_alarm_control_panel.py new file mode 100644 index 00000000000..d3feac6ad3b --- /dev/null +++ b/tests/components/comelit/test_alarm_control_panel.py @@ -0,0 +1,155 @@ +"""Tests for Comelit SimpleHome alarm control panel platform.""" + +from unittest.mock import AsyncMock + +from aiocomelit.api import AlarmDataObject, ComelitVedoAreaObject, ComelitVedoZoneObject +from aiocomelit.const import AlarmAreaState, AlarmZoneState +from freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.components.alarm_control_panel import ( + ATTR_CODE, + DOMAIN as ALARM_DOMAIN, + SERVICE_ALARM_ARM_AWAY, + SERVICE_ALARM_ARM_HOME, + SERVICE_ALARM_ARM_NIGHT, + SERVICE_ALARM_DISARM, + AlarmControlPanelState, +) +from homeassistant.components.comelit.const import SCAN_INTERVAL +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from . 
import setup_integration +from .const import VEDO_PIN + +from tests.common import MockConfigEntry, async_fire_time_changed + +ENTITY_ID = "alarm_control_panel.area0" + + +@pytest.mark.parametrize( + ("human_status", "armed", "alarm_state"), + [ + (AlarmAreaState.DISARMED, 0, AlarmControlPanelState.DISARMED), + (AlarmAreaState.ARMED, 1, AlarmControlPanelState.ARMED_HOME), + (AlarmAreaState.ARMED, 2, AlarmControlPanelState.ARMED_HOME), + (AlarmAreaState.ARMED, 3, AlarmControlPanelState.ARMED_NIGHT), + (AlarmAreaState.ARMED, 4, AlarmControlPanelState.ARMED_AWAY), + (AlarmAreaState.UNKNOWN, 0, STATE_UNAVAILABLE), + ], +) +async def test_entity_availability( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_vedo: AsyncMock, + mock_vedo_config_entry: MockConfigEntry, + human_status: AlarmAreaState, + armed: int, + alarm_state: AlarmControlPanelState, +) -> None: + """Test all entities.""" + + await setup_integration(hass, mock_vedo_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == AlarmControlPanelState.DISARMED + + vedo_query = AlarmDataObject( + alarm_areas={ + 0: ComelitVedoAreaObject( + index=0, + name="Area0", + p1=True, + p2=True, + ready=False, + armed=armed, + alarm=False, + alarm_memory=False, + sabotage=False, + anomaly=False, + in_time=False, + out_time=False, + human_status=human_status, + ) + }, + alarm_zones={ + 0: ComelitVedoZoneObject( + index=0, + name="Zone0", + status_api="0x000", + status=0, + human_status=AlarmZoneState.REST, + ) + }, + ) + + mock_vedo.get_all_areas_and_zones.return_value = vedo_query + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == alarm_state + + +@pytest.mark.parametrize( + ("service", "alarm_state"), + [ + (SERVICE_ALARM_DISARM, AlarmControlPanelState.DISARMED), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + ], +) +async def test_arming_disarming( + hass: HomeAssistant, + mock_vedo: AsyncMock, + mock_vedo_config_entry: MockConfigEntry, + service: str, + alarm_state: AlarmControlPanelState, +) -> None: + """Test arming and disarming.""" + + await setup_integration(hass, mock_vedo_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == AlarmControlPanelState.DISARMED + + await hass.services.async_call( + ALARM_DOMAIN, + service, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_CODE: VEDO_PIN}, + blocking=True, + ) + + mock_vedo.set_zone_status.assert_called() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == alarm_state + + +async def test_wrong_code( + hass: HomeAssistant, + mock_vedo: AsyncMock, + mock_vedo_config_entry: MockConfigEntry, +) -> None: + """Test disarm service with wrong code.""" + + await setup_integration(hass, mock_vedo_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == AlarmControlPanelState.DISARMED + + await hass.services.async_call( + ALARM_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_CODE: 1111}, + blocking=True, + ) + + mock_vedo.set_zone_status.assert_not_called() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == AlarmControlPanelState.DISARMED diff --git a/tests/components/comelit/test_climate.py b/tests/components/comelit/test_climate.py new file mode 100644 index 00000000000..44478d154f4 --- 
/dev/null +++ b/tests/components/comelit/test_climate.py @@ -0,0 +1,282 @@ +"""Tests for Comelit SimpleHome climate platform.""" + +from typing import Any +from unittest.mock import AsyncMock, patch + +from aiocomelit.api import ComelitSerialBridgeObject +from aiocomelit.const import CLIMATE, WATT +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.climate import ( + ATTR_HVAC_MODE, + DOMAIN as CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_TEMPERATURE, + HVACMode, +) +from homeassistant.components.comelit.const import SCAN_INTERVAL +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +ENTITY_ID = "climate.climate0" + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.comelit.BRIDGE_PLATFORMS", [Platform.CLIMATE]): + await setup_integration(hass, mock_serial_bridge_config_entry) + + await snapshot_platform( + hass, + entity_registry, + snapshot(), + mock_serial_bridge_config_entry.entry_id, + ) + + +@pytest.mark.parametrize( + ("val", "mode", "temp"), + [ + ( + [ + [100, 0, "U", "M", 210, 0, 0, "U"], + [650, 0, "O", "M", 500, 0, 0, "N"], + [0, 0], + ], + HVACMode.HEAT, + 21.0, + ), + ( + [ + [100, 1, "U", "A", 210, 1, 0, "O"], + [650, 0, "O", "M", 500, 0, 0, "N"], + [0, 0], + ], + HVACMode.HEAT, + 21.0, + ), + ( + [ + [100, 0, "O", "A", 210, 0, 0, "O"], + [650, 0, "O", "M", 500, 0, 0, "N"], + [0, 0], + ], + HVACMode.OFF, + 21.0, + ), + ], +) +async def test_climate_data_update( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, + val: list[Any, Any], + mode: HVACMode, + temp: float, +) -> None: + """Test climate data update.""" + await setup_integration(hass, mock_serial_bridge_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == HVACMode.HEAT + assert state.attributes[ATTR_TEMPERATURE] == 5.0 + + mock_serial_bridge.get_all_devices.return_value[CLIMATE] = { + 0: ComelitSerialBridgeObject( + index=0, + name="Climate0", + status=0, + human_status="off", + type="climate", + val=val, + protected=0, + zone="Living room", + power=0.0, + power_unit=WATT, + ), + } + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == mode + assert state.attributes[ATTR_TEMPERATURE] == temp + + +async def test_climate_data_update_bad_data( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, +) -> None: + """Test climate data update.""" + await setup_integration(hass, mock_serial_bridge_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == HVACMode.HEAT + assert state.attributes[ATTR_TEMPERATURE] == 5.0 + + mock_serial_bridge.get_all_devices.return_value[CLIMATE] = { + 0: ComelitSerialBridgeObject( + index=0, + name="Climate0", + status=0, + human_status="off", + 
type="climate", + val="bad_data", + protected=0, + zone="Living room", + power=0.0, + power_unit=WATT, + ), + } + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == HVACMode.HEAT + assert state.attributes[ATTR_TEMPERATURE] == 5.0 + + +async def test_climate_set_temperature( + hass: HomeAssistant, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, +) -> None: + """Test climate set temperature service.""" + + await setup_integration(hass, mock_serial_bridge_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == HVACMode.HEAT + assert state.attributes[ATTR_TEMPERATURE] == 5.0 + + # Test set temperature + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 23}, + blocking=True, + ) + mock_serial_bridge.set_clima_status.assert_called() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == HVACMode.HEAT + assert state.attributes[ATTR_TEMPERATURE] == 23.0 + + +async def test_climate_set_temperature_when_off( + hass: HomeAssistant, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, +) -> None: + """Test climate set temperature service when off.""" + + await setup_integration(hass, mock_serial_bridge_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == HVACMode.HEAT + assert state.attributes[ATTR_TEMPERATURE] == 5.0 + + # Switch climate off + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + mock_serial_bridge.set_clima_status.assert_called() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == HVACMode.OFF + + # Test set temperature + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 23}, + blocking=True, + ) + mock_serial_bridge.set_clima_status.assert_called() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == HVACMode.OFF + + +async def test_climate_hvac_mode( + hass: HomeAssistant, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, +) -> None: + """Test climate hvac mode service.""" + + await setup_integration(hass, mock_serial_bridge_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == HVACMode.HEAT + assert state.attributes[ATTR_TEMPERATURE] == 5.0 + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + mock_serial_bridge.set_clima_status.assert_called() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == HVACMode.OFF + + +async def test_climate_hvac_mode_when_off( + hass: HomeAssistant, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, +) -> None: + """Test climate hvac mode service when off.""" + + await setup_integration(hass, mock_serial_bridge_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == HVACMode.HEAT + assert state.attributes[ATTR_TEMPERATURE] == 5.0 + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + 
mock_serial_bridge.set_clima_status.assert_called() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == HVACMode.OFF + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.AUTO}, + blocking=True, + ) + mock_serial_bridge.set_clima_status.assert_called() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == HVACMode.AUTO diff --git a/tests/components/comelit/test_coordinator.py b/tests/components/comelit/test_coordinator.py new file mode 100644 index 00000000000..a8ef82a7e89 --- /dev/null +++ b/tests/components/comelit/test_coordinator.py @@ -0,0 +1,49 @@ +"""Tests for Comelit SimpleHome coordinator.""" + +from unittest.mock import AsyncMock + +from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData +from freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.components.comelit.const import SCAN_INTERVAL +from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.parametrize( + "side_effect", + [ + CannotConnect, + CannotRetrieveData, + CannotAuthenticate, + ], +) +async def test_coordinator_data_update_fails( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, + side_effect: Exception, +) -> None: + """Test coordinator data update exceptions.""" + + entity_id = "light.light0" + + await setup_integration(hass, mock_serial_bridge_config_entry) + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF + + mock_serial_bridge.login.side_effect = side_effect + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/comelit/test_cover.py b/tests/components/comelit/test_cover.py new file mode 100644 index 00000000000..1d6c1435a5a --- /dev/null +++ b/tests/components/comelit/test_cover.py @@ -0,0 +1,161 @@ +"""Tests for Comelit SimpleHome cover platform.""" + +from unittest.mock import AsyncMock, patch + +from aiocomelit.api import ComelitSerialBridgeObject +from aiocomelit.const import COVER, WATT +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.components.comelit.const import SCAN_INTERVAL +from homeassistant.components.cover import ( + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + SERVICE_STOP_COVER, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPENING, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +ENTITY_ID = "cover.cover0" + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.comelit.BRIDGE_PLATFORMS", [Platform.COVER]): + await setup_integration(hass, mock_serial_bridge_config_entry) + + await snapshot_platform( + hass, + entity_registry, + snapshot(), + mock_serial_bridge_config_entry.entry_id, + ) + + +async def test_cover_open( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, +) -> None: + """Test cover open service.""" + + mock_serial_bridge.reset_mock() + await setup_integration(hass, mock_serial_bridge_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == STATE_UNKNOWN + + # Open cover + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, + ) + mock_serial_bridge.set_device_status.assert_called() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == STATE_OPENING + + # Finish opening, update status + mock_serial_bridge.get_all_devices.return_value[COVER] = { + 0: ComelitSerialBridgeObject( + index=0, + name="Cover0", + status=0, + human_status="stopped", + type="cover", + val=0, + protected=0, + zone="Open space", + power=0.0, + power_unit=WATT, + ), + } + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == STATE_UNKNOWN + + +async def test_cover_close( + hass: HomeAssistant, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, +) -> None: + """Test cover close and stop service.""" + + mock_serial_bridge.reset_mock() + await setup_integration(hass, mock_serial_bridge_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == STATE_UNKNOWN + + # Close cover + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, + ) + mock_serial_bridge.set_device_status.assert_called() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == STATE_CLOSING + + # Stop cover + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, + ) + mock_serial_bridge.set_device_status.assert_called() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == STATE_CLOSED + + +async def test_cover_stop_if_stopped( + hass: HomeAssistant, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, +) -> None: + """Test cover stop service when already stopped.""" + + mock_serial_bridge.reset_mock() + await setup_integration(hass, mock_serial_bridge_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == STATE_UNKNOWN + + # Stop cover while not opening/closing + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, + ) + mock_serial_bridge.set_device_status.assert_not_called() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == STATE_UNKNOWN diff --git 
a/tests/components/comelit/test_light.py b/tests/components/comelit/test_light.py new file mode 100644 index 00000000000..6c6de58c8ed --- /dev/null +++ b/tests/components/comelit/test_light.py @@ -0,0 +1,76 @@ +"""Tests for Comelit SimpleHome light platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.light import ( + DOMAIN as LIGHT_DOMAIN, + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +ENTITY_ID = "light.light0" + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.comelit.BRIDGE_PLATFORMS", [Platform.LIGHT]): + await setup_integration(hass, mock_serial_bridge_config_entry) + + await snapshot_platform( + hass, + entity_registry, + snapshot(), + mock_serial_bridge_config_entry.entry_id, + ) + + +@pytest.mark.parametrize( + ("service", "status"), + [ + (SERVICE_TURN_OFF, STATE_OFF), + (SERVICE_TURN_ON, STATE_ON), + (SERVICE_TOGGLE, STATE_ON), + ], +) +async def test_light_set_state( + hass: HomeAssistant, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, + service: str, + status: str, +) -> None: + """Test light set state service.""" + + await setup_integration(hass, mock_serial_bridge_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == STATE_OFF + + # Test set temperature + await hass.services.async_call( + LIGHT_DOMAIN, + service, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, + ) + mock_serial_bridge.set_device_status.assert_called() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == status diff --git a/tests/components/comelit/test_sensor.py b/tests/components/comelit/test_sensor.py new file mode 100644 index 00000000000..56409083165 --- /dev/null +++ b/tests/components/comelit/test_sensor.py @@ -0,0 +1,90 @@ +"""Tests for Comelit SimpleHome sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from aiocomelit.api import AlarmDataObject, ComelitVedoAreaObject, ComelitVedoZoneObject +from aiocomelit.const import AlarmAreaState, AlarmZoneState +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.components.comelit.const import SCAN_INTERVAL +from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +ENTITY_ID = "sensor.zone0" + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_vedo: AsyncMock, + mock_vedo_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.comelit.VEDO_PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_vedo_config_entry) + + await snapshot_platform( + hass, + entity_registry, + snapshot(), + mock_vedo_config_entry.entry_id, + ) + + +async def test_sensor_state_unknown( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_vedo: AsyncMock, + mock_vedo_config_entry: MockConfigEntry, +) -> None: + """Test sensor unknown state.""" + + await setup_integration(hass, mock_vedo_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == AlarmZoneState.REST.value + + vedo_query = AlarmDataObject( + alarm_areas={ + 0: ComelitVedoAreaObject( + index=0, + name="Area0", + p1=True, + p2=True, + ready=False, + armed=True, + alarm=False, + alarm_memory=False, + sabotage=False, + anomaly=False, + in_time=False, + out_time=False, + human_status=AlarmAreaState.UNKNOWN, + ) + }, + alarm_zones={ + 0: ComelitVedoZoneObject( + index=0, + name="Zone0", + status_api="0x000", + status=0, + human_status=AlarmZoneState.UNKNOWN, + ) + }, + ) + + mock_vedo.get_all_areas_and_zones.return_value = vedo_query + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == STATE_UNKNOWN diff --git a/tests/components/comelit/test_switch.py b/tests/components/comelit/test_switch.py new file mode 100644 index 00000000000..fb9a4aab79a --- /dev/null +++ b/tests/components/comelit/test_switch.py @@ -0,0 +1,76 @@ +"""Tests for Comelit SimpleHome switch platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +ENTITY_ID = "switch.switch0" + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.comelit.BRIDGE_PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, mock_serial_bridge_config_entry) + + await snapshot_platform( + hass, + entity_registry, + snapshot(), + mock_serial_bridge_config_entry.entry_id, + ) + + +@pytest.mark.parametrize( + ("service", "status"), + [ + (SERVICE_TURN_OFF, STATE_OFF), + (SERVICE_TURN_ON, STATE_ON), + (SERVICE_TOGGLE, STATE_ON), + ], +) +async def test_switch_set_state( + hass: HomeAssistant, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, + service: str, + status: str, +) -> None: + """Test switch set state service.""" + + await setup_integration(hass, mock_serial_bridge_config_entry) + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == STATE_OFF + + # Test set temperature + await hass.services.async_call( + SWITCH_DOMAIN, + service, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, + ) + mock_serial_bridge.set_device_status.assert_called() + + assert (state := hass.states.get(ENTITY_ID)) + assert state.state == status diff --git a/tests/components/conversation/test_http.py b/tests/components/conversation/test_http.py index 6d69ec3c739..77fa97ad845 100644 --- a/tests/components/conversation/test_http.py +++ b/tests/components/conversation/test_http.py @@ -536,3 +536,60 @@ async def test_ws_hass_agent_debug_sentence_trigger( # Trigger should not have been executed assert len(calls) == 0 + + +async def test_ws_hass_language_scores( + hass: HomeAssistant, init_components, hass_ws_client: WebSocketGenerator +) -> None: + """Test getting language support scores.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id( + {"type": "conversation/agent/homeassistant/language_scores"} + ) + + msg = await client.receive_json() + assert msg["success"] + + # Sanity check + result = msg["result"] + assert result["languages"]["en-US"] == { + "cloud": 3, + "focused_local": 2, + "full_local": 3, + } + + +async def test_ws_hass_language_scores_with_filter( + hass: HomeAssistant, init_components, hass_ws_client: WebSocketGenerator +) -> None: + """Test getting language support scores with language/country filter.""" + client = await hass_ws_client(hass) + + # Language filter + await client.send_json_auto_id( + {"type": "conversation/agent/homeassistant/language_scores", "language": "de"} + ) + + msg = await client.receive_json() + assert msg["success"] + + # German should be preferred + result = msg["result"] + assert result["preferred_language"] == "de-DE" + + # Language/country filter + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/language_scores", + "language": "en", + "country": "GB", + } + ) + + msg = await client.receive_json() + assert msg["success"] + + # GB English should be preferred + result = msg["result"] + assert result["preferred_language"] == "en-GB" diff --git a/tests/components/conversation/test_util.py b/tests/components/conversation/test_util.py deleted file mode 100644 index 72a334232c1..00000000000 --- a/tests/components/conversation/test_util.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Test the conversation utils.""" - -from 
homeassistant.components.conversation.util import create_matcher - - -def test_create_matcher() -> None: - """Test the create matcher method.""" - # Basic sentence - pattern = create_matcher("Hello world") - assert pattern.match("Hello world") is not None - - # Match a part - pattern = create_matcher("Hello {name}") - match = pattern.match("hello world") - assert match is not None - assert match.groupdict()["name"] == "world" - no_match = pattern.match("Hello world, how are you?") - assert no_match is None - - # Optional and matching part - pattern = create_matcher("Turn on [the] {name}") - match = pattern.match("turn on the kitchen lights") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn on kitchen lights") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn off kitchen lights") - assert match is None - - # Two different optional parts, 1 matching part - pattern = create_matcher("Turn on [the] [a] {name}") - match = pattern.match("turn on the kitchen lights") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn on kitchen lights") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn on a kitchen light") - assert match is not None - assert match.groupdict()["name"] == "kitchen light" - - # Strip plural - pattern = create_matcher("Turn {name}[s] on") - match = pattern.match("turn kitchen lights on") - assert match is not None - assert match.groupdict()["name"] == "kitchen light" - - # Optional 2 words - pattern = create_matcher("Turn [the great] {name} on") - match = pattern.match("turn the great kitchen lights on") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn kitchen lights on") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" diff --git a/tests/components/duke_energy/conftest.py b/tests/components/duke_energy/conftest.py index f74ef43bf07..f82a2353557 100644 --- a/tests/components/duke_energy/conftest.py +++ b/tests/components/duke_energy/conftest.py @@ -61,8 +61,8 @@ def mock_api() -> Generator[AsyncMock]: ): api = mock_api.return_value api.authenticate.return_value = { - "email": "TEST@EXAMPLE.COM", - "cdp_internal_user_id": "test-username", + "loginEmailAddress": "TEST@EXAMPLE.COM", + "internalUserID": "test-username", } api.get_meters.return_value = {} yield api diff --git a/tests/components/emulated_roku/test_binding.py b/tests/components/emulated_roku/test_binding.py index 5bde72d2e4d..ec3f064dfe0 100644 --- a/tests/components/emulated_roku/test_binding.py +++ b/tests/components/emulated_roku/test_binding.py @@ -1,6 +1,7 @@ """Tests for emulated_roku library bindings.""" from unittest.mock import AsyncMock, Mock, patch +from uuid import uuid4 from homeassistant.components.emulated_roku.binding import ( ATTR_APP_ID, @@ -14,14 +15,15 @@ from homeassistant.components.emulated_roku.binding import ( ROKU_COMMAND_LAUNCH, EmulatedRoku, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant async def test_events_fired_properly(hass: HomeAssistant) -> None: """Test that events are fired correctly.""" - binding = EmulatedRoku( - hass, "Test Emulated Roku", "1.2.3.4", 8060, None, None, None - ) + random_name = uuid4().hex + # Note that this test is accessing the internal EmulatedRoku class + # and should be 
refactored in the future not to do so. + binding = EmulatedRoku(hass, "x", random_name, "1.2.3.4", 8060, None, None, None) events = [] roku_event_handler = None @@ -41,8 +43,9 @@ async def test_events_fired_properly(hass: HomeAssistant) -> None: return Mock(start=AsyncMock(), close=AsyncMock()) - def listener(event): - events.append(event) + def listener(event: Event) -> None: + if event.data[ATTR_SOURCE_NAME] == random_name: + events.append(event) with patch( "homeassistant.components.emulated_roku.binding.EmulatedRokuServer", instantiate @@ -53,10 +56,10 @@ async def test_events_fired_properly(hass: HomeAssistant) -> None: assert roku_event_handler is not None - roku_event_handler.on_keydown("Test Emulated Roku", "A") - roku_event_handler.on_keyup("Test Emulated Roku", "A") - roku_event_handler.on_keypress("Test Emulated Roku", "C") - roku_event_handler.launch("Test Emulated Roku", "1") + roku_event_handler.on_keydown(random_name, "A") + roku_event_handler.on_keyup(random_name, "A") + roku_event_handler.on_keypress(random_name, "C") + roku_event_handler.launch(random_name, "1") await hass.async_block_till_done() @@ -64,20 +67,20 @@ async def test_events_fired_properly(hass: HomeAssistant) -> None: assert events[0].event_type == EVENT_ROKU_COMMAND assert events[0].data[ATTR_COMMAND_TYPE] == ROKU_COMMAND_KEYDOWN - assert events[0].data[ATTR_SOURCE_NAME] == "Test Emulated Roku" + assert events[0].data[ATTR_SOURCE_NAME] == random_name assert events[0].data[ATTR_KEY] == "A" assert events[1].event_type == EVENT_ROKU_COMMAND assert events[1].data[ATTR_COMMAND_TYPE] == ROKU_COMMAND_KEYUP - assert events[1].data[ATTR_SOURCE_NAME] == "Test Emulated Roku" + assert events[1].data[ATTR_SOURCE_NAME] == random_name assert events[1].data[ATTR_KEY] == "A" assert events[2].event_type == EVENT_ROKU_COMMAND assert events[2].data[ATTR_COMMAND_TYPE] == ROKU_COMMAND_KEYPRESS - assert events[2].data[ATTR_SOURCE_NAME] == "Test Emulated Roku" + assert events[2].data[ATTR_SOURCE_NAME] == random_name assert events[2].data[ATTR_KEY] == "C" assert events[3].event_type == EVENT_ROKU_COMMAND assert events[3].data[ATTR_COMMAND_TYPE] == ROKU_COMMAND_LAUNCH - assert events[3].data[ATTR_SOURCE_NAME] == "Test Emulated Roku" + assert events[3].data[ATTR_SOURCE_NAME] == random_name assert events[3].data[ATTR_APP_ID] == "1" diff --git a/tests/components/emulated_roku/test_init.py b/tests/components/emulated_roku/test_init.py index cf2a415f19c..473e0c662aa 100644 --- a/tests/components/emulated_roku/test_init.py +++ b/tests/components/emulated_roku/test_init.py @@ -86,16 +86,6 @@ async def test_setup_entry_successful(hass: HomeAssistant) -> None: assert await emulated_roku.async_setup_entry(hass, entry) is True assert len(instantiate.mock_calls) == 1 - assert hass.data[emulated_roku.DOMAIN] - - roku_instance = hass.data[emulated_roku.DOMAIN]["Emulated Roku Test"] - - assert roku_instance.roku_usn == "Emulated Roku Test" - assert roku_instance.host_ip == "1.2.3.5" - assert roku_instance.listen_port == 8060 - assert roku_instance.advertise_ip == "1.2.3.4" - assert roku_instance.advertise_port == 8071 - assert roku_instance.bind_multicast is False async def test_unload_entry(hass: HomeAssistant) -> None: @@ -113,10 +103,6 @@ async def test_unload_entry(hass: HomeAssistant) -> None: ): assert await emulated_roku.async_setup_entry(hass, entry) is True - assert emulated_roku.DOMAIN in hass.data - await hass.async_block_till_done() assert await emulated_roku.async_unload_entry(hass, entry) - - assert 
len(hass.data[emulated_roku.DOMAIN]) == 0 diff --git a/tests/components/energy/test_websocket_api.py b/tests/components/energy/test_websocket_api.py index 959ec7d1687..e4b0e568a70 100644 --- a/tests/components/energy/test_websocket_api.py +++ b/tests/components/energy/test_websocket_api.py @@ -149,7 +149,13 @@ async def test_save_preferences( "stat_energy_to": "my_battery_charging", }, ], - "device_consumption": [{"stat_consumption": "some_device_usage"}], + "device_consumption": [ + { + "stat_consumption": "some_device_usage", + "name": "My Device", + "included_in_stat": "sensor.some_other_device", + } + ], } await client.send_json({"id": 6, "type": "energy/save_prefs", **new_prefs}) diff --git a/tests/components/enphase_envoy/snapshots/test_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_sensor.ambr index c1e2c9270e2..101caaf1aea 100644 --- a/tests/components/enphase_envoy/snapshots/test_sensor.ambr +++ b/tests/components/enphase_envoy/snapshots/test_sensor.ambr @@ -361,7 +361,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -374,7 +374,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -1456,7 +1456,7 @@ 'state': '0.3', }) # --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1471,7 +1471,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1486,7 +1486,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -1495,22 +1495,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1525,7 +1525,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 
'icon': None, @@ -1540,7 +1540,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -1549,15 +1549,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , @@ -2519,7 +2519,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -2532,7 +2532,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -5374,7 +5374,7 @@ 'state': '0.3', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5389,7 +5389,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5404,7 +5404,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5413,22 +5413,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5443,7 +5443,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5458,7 +5458,7 @@ }), 
'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l1', + 'original_name': 'Power factor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5467,22 +5467,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.22', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5497,7 +5497,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5512,7 +5512,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l2', + 'original_name': 'Power factor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5521,22 +5521,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5551,7 +5551,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5566,7 +5566,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l3', + 'original_name': 'Power factor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5575,22 +5575,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l3-state] 
StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.24', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5605,7 +5605,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5620,7 +5620,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5629,22 +5629,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.11', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l1-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5659,7 +5659,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5674,7 +5674,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l1', + 'original_name': 'Power factor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5683,22 +5683,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l1-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'friendly_name': 'Envoy 1234 Power factor production CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.12', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l2-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l2-entry] 
EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5713,7 +5713,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5728,7 +5728,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l2', + 'original_name': 'Power factor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5737,22 +5737,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l2-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'friendly_name': 'Envoy 1234 Power factor production CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.13', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l3-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5767,7 +5767,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5782,7 +5782,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l3', + 'original_name': 'Power factor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5791,15 +5791,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l3-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'friendly_name': 'Envoy 1234 Power factor production CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , @@ -7026,7 +7026,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -7039,7 +7039,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -9881,7 +9881,7 @@ 'state': '0.3', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -9896,7 +9896,7 
@@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -9911,7 +9911,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -9920,22 +9920,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -9950,7 +9950,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -9965,7 +9965,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l1', + 'original_name': 'Power factor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -9974,22 +9974,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.22', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10004,7 +10004,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10019,7 +10019,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l2', + 'original_name': 'Power factor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, 
@@ -10028,22 +10028,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10058,7 +10058,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10073,7 +10073,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l3', + 'original_name': 'Power factor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10082,22 +10082,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.24', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10112,7 +10112,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10127,7 +10127,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10136,22 +10136,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 
'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.11', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l1-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10166,7 +10166,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10181,7 +10181,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l1', + 'original_name': 'Power factor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10190,22 +10190,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l1-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'friendly_name': 'Envoy 1234 Power factor production CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.12', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l2-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10220,7 +10220,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10235,7 +10235,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l2', + 'original_name': 'Power factor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10244,22 +10244,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l2-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'friendly_name': 'Envoy 1234 Power factor production CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.13', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l3-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10274,7 +10274,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'has_entity_name': True, 'hidden_by': 
None, 'icon': None, @@ -10289,7 +10289,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l3', + 'original_name': 'Power factor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10298,15 +10298,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l3-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'friendly_name': 'Envoy 1234 Power factor production CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , @@ -11630,7 +11630,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -11643,7 +11643,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -11688,7 +11688,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l1', + 'original_name': 'Balanced net power consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -11701,7 +11701,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l1', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l1', 'state_class': , 'unit_of_measurement': , }), @@ -11746,7 +11746,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l2', + 'original_name': 'Balanced net power consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -11759,7 +11759,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l2', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l2', 'state_class': , 'unit_of_measurement': , }), @@ -11804,7 +11804,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l3', + 'original_name': 'Balanced net power consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -11817,7 +11817,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l3', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l3', 'state_class': , 'unit_of_measurement': , }), @@ -17547,7 +17547,7 @@ 'state': '0.3', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17562,7 +17562,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 
'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17577,7 +17577,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17586,22 +17586,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17616,7 +17616,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17631,7 +17631,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l1', + 'original_name': 'Power factor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17640,22 +17640,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.22', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17670,7 +17670,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17685,7 +17685,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l2', + 'original_name': 'Power factor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 
'supported_features': 0, @@ -17694,22 +17694,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17724,7 +17724,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17739,7 +17739,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l3', + 'original_name': 'Power factor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17748,22 +17748,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.24', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17778,7 +17778,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17793,7 +17793,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17802,22 +17802,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power 
factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.11', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l1-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17832,7 +17832,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17847,7 +17847,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l1', + 'original_name': 'Power factor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17856,22 +17856,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l1-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'friendly_name': 'Envoy 1234 Power factor production CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.12', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l2-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17886,7 +17886,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17901,7 +17901,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l2', + 'original_name': 'Power factor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17910,22 +17910,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l2-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'friendly_name': 'Envoy 1234 Power factor production CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.13', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l3-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17940,7 
+17940,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17955,7 +17955,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l3', + 'original_name': 'Power factor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17964,22 +17964,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l3-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'friendly_name': 'Envoy 1234 Power factor production CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.14', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17994,7 +17994,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -18009,7 +18009,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor storage CT', + 'original_name': 'Power factor storage CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -18018,22 +18018,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT', + 'friendly_name': 'Envoy 1234 Power factor storage CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l1-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -18048,7 +18048,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -18063,7 +18063,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor storage CT l1', + 'original_name': 'Power factor storage CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -18072,22 +18072,22 @@ 'unit_of_measurement': None, }) # --- -# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l1-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT l1', + 'friendly_name': 'Envoy 1234 Power factor storage CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.32', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l2-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -18102,7 +18102,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -18117,7 +18117,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor storage CT l2', + 'original_name': 'Power factor storage CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -18126,22 +18126,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l2-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT l2', + 'friendly_name': 'Envoy 1234 Power factor storage CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l3-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -18156,7 +18156,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -18171,7 +18171,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor storage CT l3', + 'original_name': 'Power factor storage CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -18180,15 +18180,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l3-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT l3', + 'friendly_name': 'Envoy 1234 Power factor storage CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , @@ 
-19586,7 +19586,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -19599,7 +19599,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -19644,7 +19644,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l1', + 'original_name': 'Balanced net power consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -19657,7 +19657,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l1', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l1', 'state_class': , 'unit_of_measurement': , }), @@ -19702,7 +19702,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l2', + 'original_name': 'Balanced net power consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -19715,7 +19715,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l2', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l2', 'state_class': , 'unit_of_measurement': , }), @@ -19760,7 +19760,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l3', + 'original_name': 'Balanced net power consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -19773,7 +19773,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l3', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l3', 'state_class': , 'unit_of_measurement': , }), @@ -24065,7 +24065,7 @@ 'state': '0.3', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24080,7 +24080,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24095,7 +24095,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24104,22 +24104,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 
'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24134,7 +24134,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24149,7 +24149,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l1', + 'original_name': 'Power factor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24158,22 +24158,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.22', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24188,7 +24188,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24203,7 +24203,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l2', + 'original_name': 'Power factor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24212,22 +24212,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l3-entry] 
EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24242,7 +24242,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24257,7 +24257,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l3', + 'original_name': 'Power factor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24266,22 +24266,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.24', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24296,7 +24296,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24311,7 +24311,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24320,22 +24320,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.11', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l1-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24350,7 +24350,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24365,7 +24365,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l1', + 'original_name': 'Power factor production CT l1', 'platform': 
'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24374,22 +24374,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l1-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'friendly_name': 'Envoy 1234 Power factor production CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.12', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l2-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24404,7 +24404,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24419,7 +24419,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l2', + 'original_name': 'Power factor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24428,22 +24428,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l2-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'friendly_name': 'Envoy 1234 Power factor production CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.13', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l3-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24458,7 +24458,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24473,7 +24473,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l3', + 'original_name': 'Power factor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24482,15 +24482,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l3-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'friendly_name': 'Envoy 1234 Power factor production CT l3', 
'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , @@ -25326,7 +25326,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -25339,7 +25339,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -25799,7 +25799,7 @@ 'state': 'normal', }) # --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -25814,7 +25814,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -25829,7 +25829,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -25838,15 +25838,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/esphome/test_assist_satellite.py b/tests/components/esphome/test_assist_satellite.py index 329a7b5179a..2254d24c9ac 100644 --- a/tests/components/esphome/test_assist_satellite.py +++ b/tests/components/esphome/test_assist_satellite.py @@ -25,7 +25,12 @@ from aioesphomeapi import ( ) import pytest -from homeassistant.components import assist_satellite, conversation, tts +from homeassistant.components import ( + assist_pipeline, + assist_satellite, + conversation, + tts, +) from homeassistant.components.assist_pipeline import PipelineEvent, PipelineEventType from homeassistant.components.assist_satellite import ( AssistSatelliteConfiguration, @@ -53,6 +58,7 @@ from homeassistant.helpers import ( intent as intent_helper, ) from homeassistant.helpers.entity_component import EntityComponent +from homeassistant.helpers.network import get_url from .conftest import MockESPHomeDevice @@ -128,8 +134,6 @@ async def test_pipeline_api_audio( ) -> None: """Test a complete pipeline run with API audio (over the TCP connection).""" conversation_id = "test-conversation-id" - media_url = "http://test.url" - media_id = "test-media-id" mock_device: MockESPHomeDevice = await mock_esphome_device( mock_client=mock_client, @@ -323,15 +327,22 @@ async def test_pipeline_api_audio( 
assert satellite.state == AssistSatelliteState.RESPONDING # Should return mock_wav audio + mock_tts_result_stream = MockResultStream(hass, "wav", mock_wav) event_callback( PipelineEvent( type=PipelineEventType.TTS_END, - data={"tts_output": {"url": media_url, "media_id": media_id}}, + data={ + "tts_output": { + "media_id": "test-media-id", + "url": mock_tts_result_stream.url, + "token": mock_tts_result_stream.token, + } + }, ) ) assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END, - {"url": media_url}, + {"url": get_url(hass) + mock_tts_result_stream.url}, ) event_callback(PipelineEvent(type=PipelineEventType.RUN_END)) @@ -350,12 +361,6 @@ async def test_pipeline_api_audio( original_handle_pipeline_finished() pipeline_finished.set() - async def async_get_media_source_audio( - hass: HomeAssistant, - media_source_id: str, - ) -> tuple[str, bytes]: - return ("wav", mock_wav) - tts_finished = asyncio.Event() original_tts_response_finished = satellite.tts_response_finished @@ -368,10 +373,6 @@ async def test_pipeline_api_audio( "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), - patch( - "homeassistant.components.tts.async_get_media_source_audio", - new=async_get_media_source_audio, - ), patch.object(satellite, "handle_pipeline_finished", handle_pipeline_finished), patch.object(satellite, "_stream_tts_audio", _stream_tts_audio), patch.object(satellite, "tts_response_finished", tts_response_finished), @@ -429,8 +430,6 @@ async def test_pipeline_udp_audio( mainly focused on the UDP server. """ conversation_id = "test-conversation-id" - media_url = "http://test.url" - media_id = "test-media-id" mock_device: MockESPHomeDevice = await mock_esphome_device( mock_client=mock_client, @@ -517,10 +516,17 @@ async def test_pipeline_udp_audio( ) # Should return mock_wav audio + mock_tts_result_stream = MockResultStream(hass, "wav", mock_wav) event_callback( PipelineEvent( type=PipelineEventType.TTS_END, - data={"tts_output": {"url": media_url, "media_id": media_id}}, + data={ + "tts_output": { + "media_id": "test-media-id", + "url": mock_tts_result_stream.url, + "token": mock_tts_result_stream.token, + } + }, ) ) @@ -533,12 +539,6 @@ async def test_pipeline_udp_audio( original_handle_pipeline_finished() pipeline_finished.set() - async def async_get_media_source_audio( - hass: HomeAssistant, - media_source_id: str, - ) -> tuple[str, bytes]: - return ("wav", mock_wav) - tts_finished = asyncio.Event() original_tts_response_finished = satellite.tts_response_finished @@ -562,10 +562,6 @@ async def test_pipeline_udp_audio( "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), - patch( - "homeassistant.components.tts.async_get_media_source_audio", - new=async_get_media_source_audio, - ), patch.object(satellite, "handle_pipeline_finished", handle_pipeline_finished), patch.object(satellite, "tts_response_finished", tts_response_finished), ): @@ -647,8 +643,6 @@ async def test_pipeline_media_player( mainly focused on tts_response_finished getting automatically called. 
""" conversation_id = "test-conversation-id" - media_url = "http://test.url" - media_id = "test-media-id" mock_device: MockESPHomeDevice = await mock_esphome_device( mock_client=mock_client, @@ -728,10 +722,17 @@ async def test_pipeline_media_player( ) # Should return mock_wav audio + mock_tts_result_stream = MockResultStream(hass, "wav", mock_wav) event_callback( PipelineEvent( type=PipelineEventType.TTS_END, - data={"tts_output": {"url": media_url, "media_id": media_id}}, + data={ + "tts_output": { + "media_id": "test-media-id", + "url": mock_tts_result_stream.url, + "token": mock_tts_result_stream.token, + } + }, ) ) @@ -744,12 +745,6 @@ async def test_pipeline_media_player( original_handle_pipeline_finished() pipeline_finished.set() - async def async_get_media_source_audio( - hass: HomeAssistant, - media_source_id: str, - ) -> tuple[str, bytes]: - return ("wav", mock_wav) - tts_finished = asyncio.Event() original_tts_response_finished = satellite.tts_response_finished @@ -762,10 +757,6 @@ async def test_pipeline_media_player( "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), - patch( - "homeassistant.components.tts.async_get_media_source_audio", - new=async_get_media_source_audio, - ), patch.object(satellite, "handle_pipeline_finished", handle_pipeline_finished), patch.object(satellite, "tts_response_finished", tts_response_finished), ): @@ -939,80 +930,63 @@ async def test_streaming_tts_errors( # Should not stream if not running satellite._is_running = False - await satellite._stream_tts_audio("test-media-id") + await satellite._stream_tts_audio(MockResultStream(hass, "wav", mock_wav)) mock_client.send_voice_assistant_audio.assert_not_called() satellite._is_running = True # Should only stream WAV - async def get_mp3( - hass: HomeAssistant, - media_source_id: str, - ) -> tuple[str, bytes]: - return ("mp3", b"") - - with patch( - "homeassistant.components.tts.async_get_media_source_audio", new=get_mp3 - ): - await satellite._stream_tts_audio("test-media-id") - mock_client.send_voice_assistant_audio.assert_not_called() + await satellite._stream_tts_audio(MockResultStream(hass, "mp3", b"")) + mock_client.send_voice_assistant_audio.assert_not_called() # Needs to be the correct sample rate, etc. 
- async def get_bad_wav( - hass: HomeAssistant, - media_source_id: str, - ) -> tuple[str, bytes]: - with io.BytesIO() as wav_io: - with wave.open(wav_io, "wb") as wav_file: - wav_file.setframerate(48000) - wav_file.setsampwidth(2) - wav_file.setnchannels(1) - wav_file.writeframes(b"test-wav") + with io.BytesIO() as wav_io: + with wave.open(wav_io, "wb") as wav_file: + wav_file.setframerate(48000) + wav_file.setsampwidth(2) + wav_file.setnchannels(1) + wav_file.writeframes(b"test-wav") - return ("wav", wav_io.getvalue()) + mock_tts_result_stream = MockResultStream(hass, "wav", wav_io.getvalue()) - with patch( - "homeassistant.components.tts.async_get_media_source_audio", new=get_bad_wav - ): - await satellite._stream_tts_audio("test-media-id") - mock_client.send_voice_assistant_audio.assert_not_called() + await satellite._stream_tts_audio(mock_tts_result_stream) + mock_client.send_voice_assistant_audio.assert_not_called() # Check that TTS_STREAM_* events still get sent after cancel media_fetched = asyncio.Event() - async def get_slow_wav( - hass: HomeAssistant, - media_source_id: str, - ) -> tuple[str, bytes]: + mock_tts_result_stream = MockResultStream(hass, "wav", b"") + + async def async_stream_result_slowly(): media_fetched.set() await asyncio.sleep(1) - return ("wav", mock_wav) + yield mock_wav + + mock_tts_result_stream.async_stream_result = async_stream_result_slowly mock_client.send_voice_assistant_event.reset_mock() - with patch( - "homeassistant.components.tts.async_get_media_source_audio", new=get_slow_wav - ): - task = asyncio.create_task(satellite._stream_tts_audio("test-media-id")) - async with asyncio.timeout(1): - # Wait for media to be fetched - await media_fetched.wait() - # Cancel task - task.cancel() - await task + task = asyncio.create_task(satellite._stream_tts_audio(mock_tts_result_stream)) + async with asyncio.timeout(1): + # Wait for media to be fetched + await media_fetched.wait() - # No audio should have gone out - mock_client.send_voice_assistant_audio.assert_not_called() - assert len(mock_client.send_voice_assistant_event.call_args_list) == 2 + # Cancel task + task.cancel() + await task - # The TTS_STREAM_* events should have gone out - assert mock_client.send_voice_assistant_event.call_args_list[-2].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_START, - {}, - ) - assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_END, - {}, - ) + # No audio should have gone out + mock_client.send_voice_assistant_audio.assert_not_called() + assert len(mock_client.send_voice_assistant_event.call_args_list) == 2 + + # The TTS_STREAM_* events should have gone out + assert mock_client.send_voice_assistant_event.call_args_list[-2].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_START, + {}, + ) + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_END, + {}, + ) async def test_tts_format_from_media_player( @@ -1160,7 +1134,7 @@ async def test_announce_supported_features( Awaitable[MockESPHomeDevice], ], ) -> None: - """Test that the announce supported feature is set by flags.""" + """Test that the announce supported feature is not set by default.""" mock_device: MockESPHomeDevice = await mock_esphome_device( mock_client=mock_client, entity_info=[], @@ -1207,11 +1181,17 @@ async def test_announce_message( done = asyncio.Event() async def send_voice_assistant_announcement_await_response( - media_id: str, 
timeout: float, text: str + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str | None = None, ): assert satellite.state == AssistSatelliteState.RESPONDING assert media_id == "http://10.10.10.10:8123/api/tts_proxy/test-token" assert text == "test-text" + assert not start_conversation + assert not preannounce_media_id done.set() @@ -1238,7 +1218,11 @@ async def test_announce_message( await hass.services.async_call( assist_satellite.DOMAIN, "announce", - {"entity_id": satellite.entity_id, "message": "test-text"}, + { + "entity_id": satellite.entity_id, + "message": "test-text", + "preannounce_media_id": None, + }, blocking=True, ) await done.wait() @@ -1296,10 +1280,16 @@ async def test_announce_media_id( done = asyncio.Event() async def send_voice_assistant_announcement_await_response( - media_id: str, timeout: float, text: str + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str | None = None, ): assert satellite.state == AssistSatelliteState.RESPONDING assert media_id == "https://www.home-assistant.io/proxied.flac" + assert not start_conversation + assert not preannounce_media_id done.set() @@ -1321,6 +1311,7 @@ async def test_announce_media_id( { "entity_id": satellite.entity_id, "media_id": "https://www.home-assistant.io/resolved.mp3", + "preannounce_media_id": None, }, blocking=True, ) @@ -1328,9 +1319,9 @@ async def test_announce_media_id( assert satellite.state == AssistSatelliteState.IDLE mock_async_create_proxy_url.assert_called_once_with( - hass, - dev.id, - "https://www.home-assistant.io/resolved.mp3", + hass=hass, + device_id=dev.id, + media_url="https://www.home-assistant.io/resolved.mp3", media_format="flac", rate=48000, channels=2, @@ -1338,6 +1329,422 @@ async def test_announce_media_id( ) +async def test_announce_message_with_preannounce( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test announcement with message and preannounce media id.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str | None = None, + ): + assert satellite.state == AssistSatelliteState.RESPONDING + assert media_id == "http://10.10.10.10:8123/api/tts_proxy/test-token" + assert text == "test-text" + assert not start_conversation + assert preannounce_media_id == "test-preannounce" + + done.set() + + with ( + patch( + "homeassistant.components.tts.generate_media_source_id", + return_value="media-source://bla", + ), + patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud_tts", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), + ), + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + 
new=send_voice_assistant_announcement_await_response, + ), + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "announce", + { + "entity_id": satellite.entity_id, + "message": "test-text", + "preannounce_media_id": "test-preannounce", + }, + blocking=True, + ) + await done.wait() + assert satellite.state == AssistSatelliteState.IDLE + + +async def test_start_conversation_supported_features( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that the start conversation supported feature is not set by default.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + assert not ( + satellite.supported_features & AssistSatelliteEntityFeature.START_CONVERSATION + ) + + +async def test_start_conversation_message( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test start conversation with message.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + | VoiceAssistantFeature.START_CONVERSATION + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + pipeline = assist_pipeline.Pipeline( + conversation_engine="test engine", + conversation_language="en", + language="en", + name="test pipeline", + stt_engine="test stt", + stt_language="en", + tts_engine="test tts", + tts_language="en", + tts_voice=None, + wake_word_entity=None, + wake_word_id=None, + ) + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str, + ): + assert satellite.state == AssistSatelliteState.RESPONDING + assert media_id == "http://10.10.10.10:8123/api/tts_proxy/test-token" + assert text == "test-text" + assert start_conversation + assert not preannounce_media_id + + done.set() + + with ( + patch( + "homeassistant.components.tts.generate_media_source_id", + return_value="media-source://bla", + ), + patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud_tts", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), + ), + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + new=send_voice_assistant_announcement_await_response, + ), + patch( + "homeassistant.components.assist_satellite.entity.async_get_pipeline", + return_value=pipeline, + ), + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "start_conversation", + { + "entity_id": satellite.entity_id, 
+ "start_message": "test-text", + "preannounce_media_id": None, + }, + blocking=True, + ) + await done.wait() + assert satellite.state == AssistSatelliteState.IDLE + + +async def test_start_conversation_media_id( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + device_registry: dr.DeviceRegistry, +) -> None: + """Test start conversation with media id.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[ + MediaPlayerInfo( + object_id="mymedia_player", + key=1, + name="my media_player", + unique_id="my_media_player", + supports_pause=True, + supported_formats=[ + MediaPlayerSupportedFormat( + format="flac", + sample_rate=48000, + num_channels=2, + purpose=MediaPlayerFormatPurpose.ANNOUNCEMENT, + sample_bytes=2, + ), + ], + ) + ], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + | VoiceAssistantFeature.START_CONVERSATION + }, + ) + await hass.async_block_till_done() + + dev = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} + ) + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + pipeline = assist_pipeline.Pipeline( + conversation_engine="test engine", + conversation_language="en", + language="en", + name="test pipeline", + stt_engine="test stt", + stt_language="en", + tts_engine="test tts", + tts_language="en", + tts_voice=None, + wake_word_entity=None, + wake_word_id=None, + ) + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str, + ): + assert satellite.state == AssistSatelliteState.RESPONDING + assert media_id == "https://www.home-assistant.io/proxied.flac" + assert start_conversation + assert not preannounce_media_id + + done.set() + + with ( + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + new=send_voice_assistant_announcement_await_response, + ), + patch( + "homeassistant.components.esphome.assist_satellite.async_create_proxy_url", + return_value="https://www.home-assistant.io/proxied.flac", + ) as mock_async_create_proxy_url, + patch( + "homeassistant.components.assist_satellite.entity.async_get_pipeline", + return_value=pipeline, + ), + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "start_conversation", + { + "entity_id": satellite.entity_id, + "start_media_id": "https://www.home-assistant.io/resolved.mp3", + "preannounce_media_id": None, + }, + blocking=True, + ) + await done.wait() + assert satellite.state == AssistSatelliteState.IDLE + + mock_async_create_proxy_url.assert_called_once_with( + hass=hass, + device_id=dev.id, + media_url="https://www.home-assistant.io/resolved.mp3", + media_format="flac", + rate=48000, + channels=2, + width=2, + ) + + +async def test_start_conversation_message_with_preannounce( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test start conversation with message and preannounce 
media id.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + | VoiceAssistantFeature.START_CONVERSATION + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + pipeline = assist_pipeline.Pipeline( + conversation_engine="test engine", + conversation_language="en", + language="en", + name="test pipeline", + stt_engine="test stt", + stt_language="en", + tts_engine="test tts", + tts_language="en", + tts_voice=None, + wake_word_entity=None, + wake_word_id=None, + ) + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str, + ): + assert satellite.state == AssistSatelliteState.RESPONDING + assert media_id == "http://10.10.10.10:8123/api/tts_proxy/test-token" + assert text == "test-text" + assert start_conversation + assert preannounce_media_id == "test-preannounce" + + done.set() + + with ( + patch( + "homeassistant.components.tts.generate_media_source_id", + return_value="media-source://bla", + ), + patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud_tts", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), + ), + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + new=send_voice_assistant_announcement_await_response, + ), + patch( + "homeassistant.components.assist_satellite.entity.async_get_pipeline", + return_value=pipeline, + ), + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "start_conversation", + { + "entity_id": satellite.entity_id, + "start_message": "test-text", + "preannounce_media_id": "test-preannounce", + }, + blocking=True, + ) + await done.wait() + assert satellite.state == AssistSatelliteState.IDLE + + async def test_satellite_unloaded_on_disconnect( hass: HomeAssistant, mock_client: APIClient, diff --git a/tests/components/esphome/test_entity.py b/tests/components/esphome/test_entity.py index 296d61b664d..977ec50ab30 100644 --- a/tests/components/esphome/test_entity.py +++ b/tests/components/esphome/test_entity.py @@ -260,6 +260,76 @@ async def test_entities_removed_after_reload( assert len(hass_storage[storage_key]["data"]["binary_sensor"]) == 1 +async def test_entities_for_entire_platform_removed( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_client: APIClient, + hass_storage: dict[str, Any], + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test removing all entities for a specific platform when static info changes.""" + entity_info = [ + BinarySensorInfo( + object_id="mybinary_sensor_to_be_removed", + key=1, + name="my binary_sensor to be removed", + unique_id="mybinary_sensor_to_be_removed", + ), + ] + states = [ + BinarySensorState(key=1, state=True, missing_state=False), + ] + user_service = [] + mock_device = await mock_esphome_device( + mock_client=mock_client, + entity_info=entity_info, + user_service=user_service, + states=states, + ) 
+ entry = mock_device.entry + entry_id = entry.entry_id + storage_key = f"esphome.{entry_id}" + state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") + assert state is not None + assert state.state == STATE_ON + + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass_storage[storage_key]["data"]["binary_sensor"]) == 1 + + state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") + assert state is not None + reg_entry = entity_registry.async_get( + "binary_sensor.test_mybinary_sensor_to_be_removed" + ) + assert reg_entry is not None + assert state.attributes[ATTR_RESTORED] is True + + entity_info = [] + states = [] + mock_device = await mock_esphome_device( + mock_client=mock_client, + entity_info=entity_info, + user_service=user_service, + states=states, + entry=entry, + ) + assert mock_device.entry.entry_id == entry_id + state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") + assert state is None + reg_entry = entity_registry.async_get( + "binary_sensor.test_mybinary_sensor_to_be_removed" + ) + assert reg_entry is None + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + assert len(hass_storage[storage_key]["data"]["binary_sensor"]) == 0 + + async def test_entity_info_object_ids( hass: HomeAssistant, mock_client: APIClient, diff --git a/tests/components/esphome/test_event.py b/tests/components/esphome/test_event.py index c17dc4d98a9..d4688e8ab4e 100644 --- a/tests/components/esphome/test_event.py +++ b/tests/components/esphome/test_event.py @@ -4,6 +4,7 @@ from aioesphomeapi import APIClient, Event, EventInfo import pytest from homeassistant.components.event import EventDeviceClass +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -11,9 +12,9 @@ from homeassistant.core import HomeAssistant async def test_generic_event_entity( hass: HomeAssistant, mock_client: APIClient, - mock_generic_device_entry, + mock_esphome_device, ) -> None: - """Test a generic event entity.""" + """Test a generic event entity and its availability behavior.""" entity_info = [ EventInfo( object_id="myevent", @@ -26,13 +27,31 @@ async def test_generic_event_entity( ] states = [Event(key=1, event_type="type1")] user_service = [] - await mock_generic_device_entry( + device = await mock_esphome_device( mock_client=mock_client, entity_info=entity_info, user_service=user_service, states=states, ) + await hass.async_block_till_done() + + # Test initial state state = hass.states.get("event.test_myevent") assert state is not None assert state.state == "2024-04-24T00:00:00.000+00:00" assert state.attributes["event_type"] == "type1" + + # Test device becomes unavailable + await device.mock_disconnect(True) + await hass.async_block_till_done() + state = hass.states.get("event.test_myevent") + assert state.state == STATE_UNAVAILABLE + + # Test device becomes available again + await device.mock_connect() + await hass.async_block_till_done() + + # Event entity should be available immediately without waiting for data + state = hass.states.get("event.test_myevent") + assert state.state == "2024-04-24T00:00:00.000+00:00" + assert state.attributes["event_type"] == "type1" diff --git a/tests/components/esphome/test_update.py b/tests/components/esphome/test_update.py index 5060471f5d2..910463f6e30 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -2,7 +2,7 @@ from 
collections.abc import Awaitable, Callable from typing import Any -from unittest.mock import Mock, patch +from unittest.mock import patch from aioesphomeapi import ( APIClient, @@ -86,26 +86,28 @@ def stub_reconnect(): ) async def test_update_entity( hass: HomeAssistant, - stub_reconnect, - mock_config_entry, - mock_device_info, mock_dashboard: dict[str, Any], - devices_payload, - expected_state, - expected_attributes, + devices_payload: list[dict[str, Any]], + expected_state: str, + expected_attributes: dict[str, Any], + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], ) -> None: """Test ESPHome update entity.""" mock_dashboard["configured"] = devices_payload await async_get_dashboard(hass).async_refresh() - with patch( - "homeassistant.components.esphome.update.DomainData.get_entry_data", - return_value=Mock(available=True, device_info=mock_device_info, info={}), - ): - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + ) - state = hass.states.get("update.none_firmware") + state = hass.states.get("update.test_firmware") assert state is not None assert state.state == expected_state for key, expected_value in expected_attributes.items(): @@ -130,7 +132,7 @@ async def test_update_entity( await hass.services.async_call( "update", "install", - {"entity_id": "update.none_firmware"}, + {"entity_id": "update.test_firmware"}, blocking=True, ) @@ -155,7 +157,7 @@ async def test_update_entity( await hass.services.async_call( "update", "install", - {"entity_id": "update.none_firmware"}, + {"entity_id": "update.test_firmware"}, blocking=True, ) @@ -177,7 +179,7 @@ async def test_update_entity( await hass.services.async_call( "update", "install", - {"entity_id": "update.none_firmware"}, + {"entity_id": "update.test_firmware"}, blocking=True, ) @@ -274,28 +276,30 @@ async def test_update_device_state_for_availability( async def test_update_entity_dashboard_not_available_startup( hass: HomeAssistant, - stub_reconnect, - mock_config_entry, - mock_device_info, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], mock_dashboard: dict[str, Any], ) -> None: """Test ESPHome update entity when dashboard is not available at startup.""" with ( - patch( - "homeassistant.components.esphome.update.DomainData.get_entry_data", - return_value=Mock(available=True, device_info=mock_device_info, info={}), - ), patch( "esphome_dashboard_api.ESPHomeDashboardAPI.get_devices", side_effect=TimeoutError, ), ): await async_get_dashboard(hass).async_refresh() - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + ) # We have a dashboard but it is not available - state = hass.states.get("update.none_firmware") + state = hass.states.get("update.test_firmware") assert state is None mock_dashboard["configured"] = [ @@ -308,7 +312,7 @@ async def test_update_entity_dashboard_not_available_startup( await async_get_dashboard(hass).async_refresh() await hass.async_block_till_done() - state = hass.states.get("update.none_firmware") + state = hass.states.get("update.test_firmware") assert 
state.state == STATE_ON expected_attributes = { "latest_version": "2023.2.0-dev", @@ -370,17 +374,22 @@ async def test_update_entity_dashboard_discovered_after_startup_but_update_faile async def test_update_entity_not_present_without_dashboard( - hass: HomeAssistant, stub_reconnect, mock_config_entry, mock_device_info + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], ) -> None: """Test ESPHome update entity does not get created if there is no dashboard.""" - with patch( - "homeassistant.components.esphome.update.DomainData.get_entry_data", - return_value=Mock(available=True, device_info=mock_device_info, info={}), - ): - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + ) - state = hass.states.get("update.none_firmware") + state = hass.states.get("update.test_firmware") assert state is None diff --git a/tests/components/fritz/snapshots/test_sensor.ambr b/tests/components/fritz/snapshots/test_sensor.ambr index 5ff0e448b15..ffede386099 100644 --- a/tests/components/fritz/snapshots/test_sensor.ambr +++ b/tests/components/fritz/snapshots/test_sensor.ambr @@ -453,7 +453,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.mock_title_link_download_throughput', 'has_entity_name': True, 'hidden_by': None, @@ -598,7 +598,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.mock_title_link_upload_throughput', 'has_entity_name': True, 'hidden_by': None, @@ -647,7 +647,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': , + 'entity_category': None, 'entity_id': 'sensor.mock_title_max_connection_download_throughput', 'has_entity_name': True, 'hidden_by': None, @@ -696,7 +696,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': , + 'entity_category': None, 'entity_id': 'sensor.mock_title_max_connection_upload_throughput', 'has_entity_name': True, 'hidden_by': None, diff --git a/tests/components/fritzbox/test_climate.py b/tests/components/fritzbox/test_climate.py index 0784d7b6188..7766d906f68 100644 --- a/tests/components/fritzbox/test_climate.py +++ b/tests/components/fritzbox/test_climate.py @@ -16,6 +16,7 @@ from homeassistant.components.climate import ( ATTR_PRESET_MODE, ATTR_PRESET_MODES, DOMAIN as CLIMATE_DOMAIN, + PRESET_BOOST, PRESET_COMFORT, PRESET_ECO, SERVICE_SET_HVAC_MODE, @@ -80,7 +81,11 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: assert state.attributes[ATTR_MAX_TEMP] == 28 assert state.attributes[ATTR_MIN_TEMP] == 8 assert state.attributes[ATTR_PRESET_MODE] is None - assert state.attributes[ATTR_PRESET_MODES] == [PRESET_ECO, PRESET_COMFORT] + assert state.attributes[ATTR_PRESET_MODES] == [ + PRESET_ECO, + PRESET_COMFORT, + PRESET_BOOST, + ] assert state.attributes[ATTR_STATE_BATTERY_LOW] is True assert state.attributes[ATTR_STATE_HOLIDAY_MODE] is False assert state.attributes[ATTR_STATE_SUMMER_MODE] is False @@ -434,11 +439,31 @@ async def test_set_preset_mode_eco( assert device.set_target_temperature.call_args_list == expected_call_args +async def test_set_preset_mode_boost( + hass: HomeAssistant, + fritz: Mock, +) -> None: + """Test setting preset 
mode.""" + device = FritzDeviceClimateMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_BOOST}, + True, + ) + assert device.set_target_temperature.call_count == 1 + assert device.set_target_temperature.call_args_list == [call(30, True)] + + async def test_preset_mode_update(hass: HomeAssistant, fritz: Mock) -> None: """Test preset mode.""" device = FritzDeviceClimateMock() - device.comfort_temperature = 98 - device.eco_temperature = 99 + device.comfort_temperature = 23 + device.eco_temperature = 20 assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) @@ -447,8 +472,8 @@ async def test_preset_mode_update(hass: HomeAssistant, fritz: Mock) -> None: assert state assert state.attributes[ATTR_PRESET_MODE] is None - device.target_temperature = 98 - + # test comfort preset + device.target_temperature = 23 next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) @@ -458,8 +483,8 @@ async def test_preset_mode_update(hass: HomeAssistant, fritz: Mock) -> None: assert state assert state.attributes[ATTR_PRESET_MODE] == PRESET_COMFORT - device.target_temperature = 99 - + # test eco preset + device.target_temperature = 20 next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) @@ -469,6 +494,17 @@ async def test_preset_mode_update(hass: HomeAssistant, fritz: Mock) -> None: assert state assert state.attributes[ATTR_PRESET_MODE] == PRESET_ECO + # test boost preset + device.target_temperature = 127 # special temp from the api + next_update = dt_util.utcnow() + timedelta(seconds=200) + async_fire_time_changed(hass, next_update) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get(ENTITY_ID) + + assert fritz().update_devices.call_count == 4 + assert state + assert state.attributes[ATTR_PRESET_MODE] == PRESET_BOOST + async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: """Test adding new discovered devices during runtime.""" @@ -509,7 +545,11 @@ async def test_holidy_summer_mode( assert state.attributes[ATTR_STATE_SUMMER_MODE] is False assert state.attributes[ATTR_HVAC_MODES] == [HVACMode.HEAT, HVACMode.OFF] assert state.attributes[ATTR_PRESET_MODE] is None - assert state.attributes[ATTR_PRESET_MODES] == [PRESET_ECO, PRESET_COMFORT] + assert state.attributes[ATTR_PRESET_MODES] == [ + PRESET_ECO, + PRESET_COMFORT, + PRESET_BOOST, + ] # test holiday mode device.holiday_active = True @@ -596,4 +636,8 @@ async def test_holidy_summer_mode( assert state.attributes[ATTR_STATE_SUMMER_MODE] is False assert state.attributes[ATTR_HVAC_MODES] == [HVACMode.HEAT, HVACMode.OFF] assert state.attributes[ATTR_PRESET_MODE] is None - assert state.attributes[ATTR_PRESET_MODES] == [PRESET_ECO, PRESET_COMFORT] + assert state.attributes[ATTR_PRESET_MODES] == [ + PRESET_ECO, + PRESET_COMFORT, + PRESET_BOOST, + ] diff --git a/tests/components/generic_thermostat/test_climate.py b/tests/components/generic_thermostat/test_climate.py index 7e2e92f025b..65be83bad20 100644 --- a/tests/components/generic_thermostat/test_climate.py +++ b/tests/components/generic_thermostat/test_climate.py @@ -1119,6 +1119,52 @@ async def 
test_precision(hass: HomeAssistant) -> None: assert state.attributes.get("target_temp_step") == 0.1 +@pytest.fixture( + params=[ + HVACMode.HEAT, + HVACMode.COOL, + ] +) +async def setup_comp_10(hass: HomeAssistant, request: pytest.FixtureRequest) -> None: + """Initialize components.""" + assert await async_setup_component( + hass, + CLIMATE_DOMAIN, + { + "climate": { + "platform": "generic_thermostat", + "name": "test", + "cold_tolerance": 0, + "hot_tolerance": 0, + "target_temp": 25, + "heater": ENT_SWITCH, + "target_sensor": ENT_SENSOR, + "initial_hvac_mode": request.param, + } + }, + ) + await hass.async_block_till_done() + + +@pytest.mark.usefixtures("setup_comp_10") +async def test_zero_tolerances(hass: HomeAssistant) -> None: + """Test that having a zero tolerance doesn't cause the switch to flip-flop.""" + + # if the switch is off, it should remain off + calls = _setup_switch(hass, False) + _setup_sensor(hass, 25) + await hass.async_block_till_done() + await common.async_set_temperature(hass, 25) + assert len(calls) == 0 + + # if the switch is on, it should turn off + calls = _setup_switch(hass, True) + _setup_sensor(hass, 25) + await hass.async_block_till_done() + await common.async_set_temperature(hass, 25) + assert len(calls) == 1 + + async def test_custom_setup_params(hass: HomeAssistant) -> None: """Test the setup with custom parameters.""" result = await async_setup_component( diff --git a/tests/components/google/test_calendar.py b/tests/components/google/test_calendar.py index 274e310fbce..720c0176850 100644 --- a/tests/components/google/test_calendar.py +++ b/tests/components/google/test_calendar.py @@ -1455,6 +1455,7 @@ async def test_working_location_ignored( ("event_type", "expected_event_message"), [ ("workingLocation", "Test All Day Event"), + ("birthday", None), ("default", None), ], ) @@ -1515,3 +1516,49 @@ async def test_no_working_location_entity( entity_entry = entity_registry.async_get("calendar.working_location") assert not entity_entry + + +@pytest.mark.parametrize( + ("event_type", "expected_event_message"), + [ + ("workingLocation", None), + ("birthday", "Test All Day Event"), + ("default", None), + ], +) +@pytest.mark.parametrize("calendar_is_primary", [True]) +async def test_birthday_entity( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + entity_registry: er.EntityRegistry, + mock_events_list_items: Callable[[list[dict[str, Any]]], None], + component_setup: ComponentSetup, + event_type: str, + expected_event_message: str | None, +) -> None: + """Test that birthday events appear only on the birthdays calendar.""" + event = { + **TEST_EVENT, + **upcoming(), + "eventType": event_type, + } + mock_events_list_items([event]) + assert await component_setup() + + entity_entry = entity_registry.async_get("calendar.birthdays") + assert entity_entry + assert entity_entry.disabled_by is None # Enabled by default + + entity_registry.async_update_entity( + entity_id="calendar.birthdays", disabled_by=None + ) + async_fire_time_changed( + hass, + dt_util.utcnow() + datetime.timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), + ) + await hass.async_block_till_done() + + state = hass.states.get("calendar.birthdays") + assert state + assert state.name == "Birthdays" + assert state.attributes.get("message") == expected_event_message diff --git a/tests/components/google_generative_ai_conversation/__init__.py b/tests/components/google_generative_ai_conversation/__init__.py index 6e2d37b035b..fbf9ee545db 100644 --- 
a/tests/components/google_generative_ai_conversation/__init__.py +++ b/tests/components/google_generative_ai_conversation/__init__.py @@ -3,12 +3,12 @@ from unittest.mock import Mock from google.genai.errors import ClientError -import requests +import httpx CLIENT_ERROR_500 = ClientError( 500, Mock( - __class__=requests.Response, + __class__=httpx.Response, json=Mock( return_value={ "message": "Internal Server Error", @@ -20,7 +20,7 @@ CLIENT_ERROR_500 = ClientError( CLIENT_ERROR_API_KEY_INVALID = ClientError( 400, Mock( - __class__=requests.Response, + __class__=httpx.Response, json=Mock( return_value={ "message": "'reason': API_KEY_INVALID", diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr index 2a20ce37a57..ec98bdd6529 100644 --- a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr +++ b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr @@ -6,7 +6,7 @@ tuple( ), dict({ - 'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties={'param1': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description='Test parameters', enum=None, format=None, items=Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties=None, required=None), properties=None, required=None), 'param2': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=None, description=None, enum=None, format=None, items=None, properties=None, required=None), 'param3': Schema(min_items=None, example=None, 
property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties={'json': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties=None, required=None)}, required=[])}, required=[]))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None), + 'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'param1': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description='Test parameters', enum=None, format=None, items=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=), max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=), 'param2': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=None), 'param3': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, 
max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'json': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=)}, property_ordering=None, required=[], type=)}, property_ordering=None, required=[], type=))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None), 'history': list([ ]), 'model': 'models/gemini-2.0-flash', @@ -25,7 +25,9 @@ tuple( ), dict({ - 'message': Content(parts=[Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None)], role=None), + 'message': list([ + Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None), + ]), }), ), ]) @@ -56,7 +58,9 @@ tuple( ), dict({ - 'message': Content(parts=[Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None)], role=None), + 'message': list([ + Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None), + ]), }), ), ]) @@ -68,7 +72,7 @@ tuple( ), dict({ - 'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. 
Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties={'param1': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description='Test parameters', enum=None, format=None, items=Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties=None, required=None), properties=None, required=None), 'param2': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=None, description=None, enum=None, format=None, items=None, properties=None, required=None), 'param3': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties={'json': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties=None, required=None)}, required=[])}, required=[]))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None), Tool(function_declarations=None, retrieval=None, google_search=GoogleSearch(), google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None), + 'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. 
Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'param1': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description='Test parameters', enum=None, format=None, items=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=), max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=), 'param2': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=None), 'param3': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'json': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=)}, property_ordering=None, required=[], type=)}, property_ordering=None, required=[], type=))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None), Tool(function_declarations=None, retrieval=None, google_search=GoogleSearch(), google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None), 'history': list([ ]), 
'model': 'models/gemini-2.0-flash', @@ -87,7 +91,9 @@ tuple( ), dict({ - 'message': Content(parts=[Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None)], role=None), + 'message': list([ + Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None), + ]), }), ), ]) diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index bdf1c01fd31..a2b238b9399 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -104,28 +104,24 @@ async def test_function_call( assert result.response.response_type == intent.IntentResponseType.ACTION_DONE assert result.response.as_dict()["speech"]["plain"]["speech"] == "Hi there!" - mock_tool_call = mock_create.mock_calls[2][2]["message"] - assert mock_tool_call.model_dump() == { - "parts": [ - { - "code_execution_result": None, - "executable_code": None, - "file_data": None, - "function_call": None, - "function_response": { - "id": None, - "name": "test_tool", - "response": { - "result": "Test response", - }, - }, - "inline_data": None, - "text": None, - "thought": None, - "video_metadata": None, + mock_tool_response_parts = mock_create.mock_calls[2][2]["message"] + assert len(mock_tool_response_parts) == 1 + assert mock_tool_response_parts[0].model_dump() == { + "code_execution_result": None, + "executable_code": None, + "file_data": None, + "function_call": None, + "function_response": { + "id": None, + "name": "test_tool", + "response": { + "result": "Test response", }, - ], - "role": None, + }, + "inline_data": None, + "text": None, + "thought": None, + "video_metadata": None, } mock_tool.async_call.assert_awaited_once_with( @@ -292,28 +288,24 @@ async def test_function_call_without_parameters( assert result.response.response_type == intent.IntentResponseType.ACTION_DONE assert result.response.as_dict()["speech"]["plain"]["speech"] == "Hi there!" 
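A minimal sketch of the list-of-Part shape these assertions now expect, assuming the Part and FunctionResponse constructors from google.genai.types as they are rendered in the snapshots above (the variable names here are illustrative only, not part of the patch):

from google.genai.types import FunctionResponse, Part

# The tool result is now passed as a bare list of Part objects rather than a
# Content wrapper; each Part carries only the function_response payload.
tool_response_parts = [
    Part(
        function_response=FunctionResponse(
            name="test_tool",
            response={"result": "Test response"},
        )
    )
]
assert tool_response_parts[0].function_response.name == "test_tool"
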
- mock_tool_call = mock_create.mock_calls[2][2]["message"] - assert mock_tool_call.model_dump() == { - "parts": [ - { - "code_execution_result": None, - "executable_code": None, - "file_data": None, - "function_call": None, - "function_response": { - "id": None, - "name": "test_tool", - "response": { - "result": "Test response", - }, - }, - "inline_data": None, - "text": None, - "thought": None, - "video_metadata": None, + mock_tool_response_parts = mock_create.mock_calls[2][2]["message"] + assert len(mock_tool_response_parts) == 1 + assert mock_tool_response_parts[0].model_dump() == { + "code_execution_result": None, + "executable_code": None, + "file_data": None, + "function_call": None, + "function_response": { + "id": None, + "name": "test_tool", + "response": { + "result": "Test response", }, - ], - "role": None, + }, + "inline_data": None, + "text": None, + "thought": None, + "video_metadata": None, } mock_tool.async_call.assert_awaited_once_with( @@ -390,29 +382,25 @@ async def test_function_exception( assert result.response.response_type == intent.IntentResponseType.ACTION_DONE assert result.response.as_dict()["speech"]["plain"]["speech"] == "Hi there!" - mock_tool_call = mock_create.mock_calls[2][2]["message"] - assert mock_tool_call.model_dump() == { - "parts": [ - { - "code_execution_result": None, - "executable_code": None, - "file_data": None, - "function_call": None, - "function_response": { - "id": None, - "name": "test_tool", - "response": { - "error": "HomeAssistantError", - "error_text": "Test tool exception", - }, - }, - "inline_data": None, - "text": None, - "thought": None, - "video_metadata": None, + mock_tool_response_parts = mock_create.mock_calls[2][2]["message"] + assert len(mock_tool_response_parts) == 1 + assert mock_tool_response_parts[0].model_dump() == { + "code_execution_result": None, + "executable_code": None, + "file_data": None, + "function_call": None, + "function_response": { + "id": None, + "name": "test_tool", + "response": { + "error": "HomeAssistantError", + "error_text": "Test tool exception", }, - ], - "role": None, + }, + "inline_data": None, + "text": None, + "thought": None, + "video_metadata": None, } mock_tool.async_call.assert_awaited_once_with( hass, diff --git a/tests/components/gree/snapshots/test_switch.ambr b/tests/components/gree/snapshots/test_switch.ambr index 836641cb2ab..c3fa3ae24c7 100644 --- a/tests/components/gree/snapshots/test_switch.ambr +++ b/tests/components/gree/snapshots/test_switch.ambr @@ -16,10 +16,10 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'switch', - 'friendly_name': 'fake-device-1 Quiet', + 'friendly_name': 'fake-device-1 Quiet mode', }), 'context': , - 'entity_id': 'switch.fake_device_1_quiet', + 'entity_id': 'switch.fake_device_1_quiet_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -40,10 +40,10 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'switch', - 'friendly_name': 'fake-device-1 XFan', + 'friendly_name': 'fake-device-1 Xtra fan', }), 'context': , - 'entity_id': 'switch.fake_device_1_xfan', + 'entity_id': 'switch.fake_device_1_xtra_fan', 'last_changed': , 'last_reported': , 'last_updated': , @@ -109,7 +109,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.fake_device_1_quiet', + 'entity_id': 'switch.fake_device_1_quiet_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -121,7 +121,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Quiet', + 'original_name': 
'Quiet mode', 'platform': 'gree', 'previous_unique_id': None, 'supported_features': 0, @@ -173,7 +173,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.fake_device_1_xfan', + 'entity_id': 'switch.fake_device_1_xtra_fan', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -185,7 +185,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'XFan', + 'original_name': 'Xtra fan', 'platform': 'gree', 'previous_unique_id': None, 'supported_features': 0, diff --git a/tests/components/gree/test_switch.py b/tests/components/gree/test_switch.py index e9491796bdf..331b6dfa4a6 100644 --- a/tests/components/gree/test_switch.py +++ b/tests/components/gree/test_switch.py @@ -22,11 +22,11 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -ENTITY_ID_LIGHT_PANEL = f"{SWITCH_DOMAIN}.fake_device_1_panel_light" +ENTITY_ID_PANEL_LIGHT = f"{SWITCH_DOMAIN}.fake_device_1_panel_light" ENTITY_ID_HEALTH_MODE = f"{SWITCH_DOMAIN}.fake_device_1_health_mode" -ENTITY_ID_QUIET = f"{SWITCH_DOMAIN}.fake_device_1_quiet" +ENTITY_ID_QUIET_MODE = f"{SWITCH_DOMAIN}.fake_device_1_quiet_mode" ENTITY_ID_FRESH_AIR = f"{SWITCH_DOMAIN}.fake_device_1_fresh_air" -ENTITY_ID_XFAN = f"{SWITCH_DOMAIN}.fake_device_1_xfan" +ENTITY_ID_XTRA_FAN = f"{SWITCH_DOMAIN}.fake_device_1_xtra_fan" async def async_setup_gree(hass: HomeAssistant) -> MockConfigEntry: @@ -54,11 +54,11 @@ async def test_registry_settings( @pytest.mark.parametrize( "entity", [ - ENTITY_ID_LIGHT_PANEL, + ENTITY_ID_PANEL_LIGHT, ENTITY_ID_HEALTH_MODE, - ENTITY_ID_QUIET, + ENTITY_ID_QUIET_MODE, ENTITY_ID_FRESH_AIR, - ENTITY_ID_XFAN, + ENTITY_ID_XTRA_FAN, ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -81,11 +81,11 @@ async def test_send_switch_on(hass: HomeAssistant, entity: str) -> None: @pytest.mark.parametrize( "entity", [ - ENTITY_ID_LIGHT_PANEL, + ENTITY_ID_PANEL_LIGHT, ENTITY_ID_HEALTH_MODE, - ENTITY_ID_QUIET, + ENTITY_ID_QUIET_MODE, ENTITY_ID_FRESH_AIR, - ENTITY_ID_XFAN, + ENTITY_ID_XTRA_FAN, ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -112,11 +112,11 @@ async def test_send_switch_on_device_timeout( @pytest.mark.parametrize( "entity", [ - ENTITY_ID_LIGHT_PANEL, + ENTITY_ID_PANEL_LIGHT, ENTITY_ID_HEALTH_MODE, - ENTITY_ID_QUIET, + ENTITY_ID_QUIET_MODE, ENTITY_ID_FRESH_AIR, - ENTITY_ID_XFAN, + ENTITY_ID_XTRA_FAN, ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -139,11 +139,11 @@ async def test_send_switch_off(hass: HomeAssistant, entity: str) -> None: @pytest.mark.parametrize( "entity", [ - ENTITY_ID_LIGHT_PANEL, + ENTITY_ID_PANEL_LIGHT, ENTITY_ID_HEALTH_MODE, - ENTITY_ID_QUIET, + ENTITY_ID_QUIET_MODE, ENTITY_ID_FRESH_AIR, - ENTITY_ID_XFAN, + ENTITY_ID_XTRA_FAN, ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") diff --git a/tests/components/habitica/test_services.py b/tests/components/habitica/test_services.py index 258346b9ca7..774593fa0f6 100644 --- a/tests/components/habitica/test_services.py +++ b/tests/components/habitica/test_services.py @@ -60,6 +60,7 @@ from homeassistant.components.habitica.const import ( SERVICE_ACCEPT_QUEST, SERVICE_CANCEL_QUEST, SERVICE_CAST_SKILL, + SERVICE_CREATE_DAILY, SERVICE_CREATE_HABIT, SERVICE_CREATE_REWARD, SERVICE_CREATE_TODO, @@ -1012,7 +1013,12 @@ async def test_update_task_exceptions( ) @pytest.mark.parametrize( "service", - [SERVICE_CREATE_REWARD, SERVICE_CREATE_HABIT, SERVICE_CREATE_TODO], + [ + SERVICE_CREATE_DAILY, + 
SERVICE_CREATE_HABIT, + SERVICE_CREATE_REWARD, + SERVICE_CREATE_TODO, + ], ) @pytest.mark.usefixtures("habitica") async def test_create_task_exceptions( @@ -1837,6 +1843,182 @@ async def test_update_daily( habitica.update_task.assert_awaited_with(UUID(task_id), call_args) +@pytest.mark.parametrize( + ("service_data", "call_args"), + [ + ( + { + ATTR_NAME: "TITLE", + }, + Task(type=TaskType.DAILY, text="TITLE"), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_NOTES: "NOTES", + }, + Task(type=TaskType.DAILY, text="TITLE", notes="NOTES"), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_ADD_CHECKLIST_ITEM: "Checklist-item", + }, + Task( + type=TaskType.DAILY, + text="TITLE", + checklist=[ + Checklist( + id=UUID("12345678-1234-5678-1234-567812345678"), + text="Checklist-item", + completed=False, + ), + ], + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_PRIORITY: "trivial", + }, + Task(type=TaskType.DAILY, text="TITLE", priority=TaskPriority.TRIVIAL), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_START_DATE: "2025-03-05", + }, + Task(type=TaskType.DAILY, text="TITLE", startDate=datetime(2025, 3, 5)), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_FREQUENCY: "weekly", + }, + Task(type=TaskType.DAILY, text="TITLE", frequency=Frequency.WEEKLY), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_INTERVAL: 5, + }, + Task(type=TaskType.DAILY, text="TITLE", everyX=5), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_FREQUENCY: "weekly", + ATTR_REPEAT: ["m", "t", "w", "th"], + }, + Task( + type=TaskType.DAILY, + text="TITLE", + frequency=Frequency.WEEKLY, + repeat=Repeat(m=True, t=True, w=True, th=True), + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_FREQUENCY: "monthly", + ATTR_REPEAT_MONTHLY: "day_of_month", + }, + Task( + type=TaskType.DAILY, + text="TITLE", + frequency=Frequency.MONTHLY, + daysOfMonth=[25], + weeksOfMonth=[], + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_FREQUENCY: "monthly", + ATTR_REPEAT_MONTHLY: "day_of_week", + }, + Task( + type=TaskType.DAILY, + text="TITLE", + frequency=Frequency.MONTHLY, + daysOfMonth=[], + weeksOfMonth=[3], + repeat=Repeat( + m=False, t=True, w=False, th=False, f=False, s=False, su=False + ), + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_REMINDER: ["10:00"], + }, + Task( + type=TaskType.DAILY, + text="TITLE", + reminders=[ + Reminders( + id=UUID("12345678-1234-5678-1234-567812345678"), + time=datetime(2025, 2, 25, 10, 0, tzinfo=UTC), + startDate=None, + ) + ], + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_REMOVE_REMINDER: ["10:00"], + }, + Task(type=TaskType.DAILY, text="TITLE", reminders=[]), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_CLEAR_REMINDER: True, + }, + Task(type=TaskType.DAILY, text="TITLE", reminders=[]), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_STREAK: 10, + }, + Task(type=TaskType.DAILY, text="TITLE", streak=10), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_ALIAS: "ALIAS", + }, + Task(type=TaskType.DAILY, text="TITLE", alias="ALIAS"), + ), + ], +) +@pytest.mark.usefixtures("mock_uuid4") +@freeze_time("2025-02-25T22:00:00.000Z") +async def test_create_daily( + hass: HomeAssistant, + config_entry: MockConfigEntry, + habitica: AsyncMock, + service_data: dict[str, Any], + call_args: Task, +) -> None: + """Test Habitica create daily action.""" + + await hass.services.async_call( + DOMAIN, + SERVICE_CREATE_DAILY, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + habitica.create_task.assert_awaited_with(call_args) + + @pytest.mark.parametrize( "service_data", [ diff --git 
a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index e00994b355a..af951fe8aa1 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -3,6 +3,7 @@ from collections.abc import ( AsyncGenerator, AsyncIterator, + Buffer, Callable, Coroutine, Generator, @@ -13,7 +14,7 @@ from datetime import datetime from io import StringIO import os from pathlib import PurePath -from typing import Any +from typing import Any, cast from unittest.mock import ANY, AsyncMock, Mock, patch from uuid import UUID @@ -341,7 +342,7 @@ def mock_backup_agent( async def delete_backup(backup_id: str, **kwargs: Any) -> None: """Mock delete.""" - get_backup(backup_id) + await get_backup(backup_id) async def download_backup(backup_id: str, **kwargs: Any) -> AsyncIterator[bytes]: """Mock download.""" @@ -349,7 +350,7 @@ def mock_backup_agent( async def get_backup(backup_id: str, **kwargs: Any) -> AgentBackup: """Get a backup.""" - backup = next((b for b in backups if b.backup_id == backup_id), None) + backup = next((b for b in _backups if b.backup_id == backup_id), None) if backup is None: raise BackupNotFound return backup @@ -361,15 +362,15 @@ def mock_backup_agent( **kwargs: Any, ) -> None: """Upload a backup.""" - backups.append(backup) + _backups.append(backup) backup_stream = await open_stream() backup_data = bytearray() async for chunk in backup_stream: backup_data += chunk backups_data[backup.backup_id] = backup_data - backups = backups or [] - backups_data: dict[str, bytes] = {} + _backups = backups or [] + backups_data: dict[str, Buffer] = {} mock_agent = Mock(spec=BackupAgent) mock_agent.domain = domain mock_agent.name = name @@ -401,7 +402,7 @@ async def _setup_backup_platform( platform: BackupAgentPlatformProtocol, ) -> None: """Set up a mock domain.""" - mock_platform(hass, f"{domain}.backup", platform) + mock_platform(hass, f"{domain}.backup", cast(Mock, platform)) assert await async_setup_component(hass, domain, {}) await hass.async_block_till_done() @@ -423,7 +424,7 @@ async def _setup_backup_platform( name="test", read_only=False, state=supervisor_mounts.MountState.ACTIVE, - user_path="test", + user_path=PurePath("test"), usage=supervisor_mounts.MountUsage.BACKUP, server="test", type=supervisor_mounts.MountType.CIFS, @@ -441,7 +442,7 @@ async def _setup_backup_platform( name="test", read_only=False, state=supervisor_mounts.MountState.ACTIVE, - user_path="test", + user_path=PurePath("test"), usage=supervisor_mounts.MountUsage.MEDIA, server="test", type=supervisor_mounts.MountType.CIFS, @@ -854,7 +855,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( "with_automatic_settings": False, }, filename=PurePath("Test_2025-01-30_05.42_12345678.tar"), - folders={"ssl"}, + folders={supervisor_backups.Folder("ssl")}, homeassistant_exclude_database=False, homeassistant=True, location=[LOCATION_LOCAL_STORAGE], @@ -877,7 +878,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( ), ( {"include_all_addons": True}, - replace(DEFAULT_BACKUP_OPTIONS, addons="ALL"), + replace(DEFAULT_BACKUP_OPTIONS, addons=supervisor_backups.AddonSet("ALL")), ), ( {"include_database": False}, @@ -885,7 +886,14 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( ), ( {"include_folders": ["media", "share"]}, - replace(DEFAULT_BACKUP_OPTIONS, folders={"media", "share", "ssl"}), + replace( + DEFAULT_BACKUP_OPTIONS, + folders={ + supervisor_backups.Folder("media"), + supervisor_backups.Folder("share"), + 
supervisor_backups.Folder("ssl"), + }, + ), ), ( { @@ -895,7 +903,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( }, replace( DEFAULT_BACKUP_OPTIONS, - folders={"media"}, + folders={supervisor_backups.Folder("media")}, homeassistant=False, homeassistant_exclude_database=True, ), @@ -1251,11 +1259,11 @@ async def test_reader_writer_create_per_agent_encryption( hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, supervisor_client: AsyncMock, - commands: dict[str, Any], + commands: list[dict[str, Any]], password: str | None, agent_ids: list[str], password_sent_to_supervisor: str | None, - create_locations: list[str | None], + create_locations: list[str], create_protected: bool, upload_locations: list[str | None], ) -> None: @@ -1270,7 +1278,7 @@ async def test_reader_writer_create_per_agent_encryption( name=f"share{i}", read_only=False, state=supervisor_mounts.MountState.ACTIVE, - user_path=f"share{i}", + user_path=PurePath(f"share{i}"), usage=supervisor_mounts.MountUsage.BACKUP, server=f"share{i}", type=supervisor_mounts.MountType.CIFS, @@ -1996,7 +2004,7 @@ async def test_reader_writer_restore_remote_backup( homeassistant_version="2024.12.0", name="Test", protected=False, - size=0.0, + size=0, ) remote_agent = mock_backup_agent("remote", backups=[test_backup]) await _setup_backup_platform( @@ -2626,7 +2634,7 @@ async def test_config_load_config_info( freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, hass_storage: dict[str, Any], - storage_data: dict[str, Any] | None, + storage_data: dict[str, Any], ) -> None: """Test loading stored backup config and reading it via config/info.""" client = await hass_ws_client(hass) diff --git a/tests/components/heos/__init__.py b/tests/components/heos/__init__.py index cb4313bbd10..cdf93c202f0 100644 --- a/tests/components/heos/__init__.py +++ b/tests/components/heos/__init__.py @@ -37,10 +37,13 @@ class MockHeos(Heos): self.play_preset_station: AsyncMock = AsyncMock() self.play_url: AsyncMock = AsyncMock() self.player_clear_queue: AsyncMock = AsyncMock() + self.player_get_queue: AsyncMock = AsyncMock() self.player_get_quick_selects: AsyncMock = AsyncMock() self.player_play_next: AsyncMock = AsyncMock() self.player_play_previous: AsyncMock = AsyncMock() + self.player_play_queue: AsyncMock = AsyncMock() self.player_play_quick_select: AsyncMock = AsyncMock() + self.player_remove_from_queue: AsyncMock = AsyncMock() self.player_set_mute: AsyncMock = AsyncMock() self.player_set_play_mode: AsyncMock = AsyncMock() self.player_set_play_state: AsyncMock = AsyncMock() diff --git a/tests/components/heos/conftest.py b/tests/components/heos/conftest.py index 5d06d1812ea..835e4436398 100644 --- a/tests/components/heos/conftest.py +++ b/tests/components/heos/conftest.py @@ -20,6 +20,7 @@ from pyheos import ( NetworkType, PlayerUpdateResult, PlayState, + QueueItem, RepeatType, const, ) @@ -359,3 +360,28 @@ def change_data_fixture() -> PlayerUpdateResult: def change_data_mapped_ids_fixture() -> PlayerUpdateResult: """Create player change data for testing.""" return PlayerUpdateResult(updated_player_ids={1: 101}) + + +@pytest.fixture(name="queue") +def queue_fixture() -> list[QueueItem]: + """Create a queue fixture.""" + return [ + QueueItem( + queue_id=1, + song="Espresso", + album="Espresso", + artist="Sabrina Carpenter", + image_url="http://resources.wimpmusic.com/images/e4f2d75f/a69e/4b8a/b800/e18546b1ad4c/640x640.jpg", + media_id="356276483", + album_id="356276481", + ), + QueueItem( + queue_id=2, + song="A Bar Song 
(Tipsy)", + album="A Bar Song (Tipsy)", + artist="Shaboozey", + image_url="http://resources.wimpmusic.com/images/d05b8da3/4fae/45ff/ac1b/7ab7caab3523/640x640.jpg", + media_id="354365598", + album_id="354365596", + ), + ] diff --git a/tests/components/heos/snapshots/test_media_player.ambr b/tests/components/heos/snapshots/test_media_player.ambr index 4cf84363ba0..d366a7f6317 100644 --- a/tests/components/heos/snapshots/test_media_player.ambr +++ b/tests/components/heos/snapshots/test_media_player.ambr @@ -159,6 +159,32 @@ 'title': 'Music Sources', }) # --- +# name: test_get_queue + dict({ + 'media_player.test_player': dict({ + 'queue': list([ + dict({ + 'album': 'Espresso', + 'album_id': '356276481', + 'artist': 'Sabrina Carpenter', + 'image_url': 'http://resources.wimpmusic.com/images/e4f2d75f/a69e/4b8a/b800/e18546b1ad4c/640x640.jpg', + 'media_id': '356276483', + 'queue_id': 1, + 'song': 'Espresso', + }), + dict({ + 'album': 'A Bar Song (Tipsy)', + 'album_id': '354365596', + 'artist': 'Shaboozey', + 'image_url': 'http://resources.wimpmusic.com/images/d05b8da3/4fae/45ff/ac1b/7ab7caab3523/640x640.jpg', + 'media_id': '354365598', + 'queue_id': 2, + 'song': 'A Bar Song (Tipsy)', + }), + ]), + }), + }) +# --- # name: test_state_attributes StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/heos/test_media_player.py b/tests/components/heos/test_media_player.py index d5bc8cab488..085a42337b3 100644 --- a/tests/components/heos/test_media_player.py +++ b/tests/components/heos/test_media_player.py @@ -15,6 +15,7 @@ from pyheos import ( MediaType as HeosMediaType, PlayerUpdateResult, PlayState, + QueueItem, RepeatType, SignalHeosEvent, SignalType, @@ -26,10 +27,13 @@ from syrupy.assertion import SnapshotAssertion from syrupy.filters import props from homeassistant.components.heos.const import ( + ATTR_QUEUE_IDS, DOMAIN, + SERVICE_GET_QUEUE, SERVICE_GROUP_VOLUME_DOWN, SERVICE_GROUP_VOLUME_SET, SERVICE_GROUP_VOLUME_UP, + SERVICE_REMOVE_FROM_QUEUE, ) from homeassistant.components.media_player import ( ATTR_GROUP_MEMBERS, @@ -1319,6 +1323,51 @@ async def test_play_media_music_source_url( controller.play_url.assert_called_once() +async def test_play_media_queue( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, +) -> None: + """Test the play media service with type queue.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_TYPE: "queue", + ATTR_MEDIA_CONTENT_ID: "2", + }, + blocking=True, + ) + controller.player_play_queue.assert_called_once_with(1, 2) + + +async def test_play_media_queue_invalid( + hass: HomeAssistant, config_entry: MockConfigEntry, controller: MockHeos +) -> None: + """Test the play media service with an invalid queue id.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + with pytest.raises( + HomeAssistantError, + match=re.escape("Unable to play media: Invalid queue id 'Invalid'"), + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_TYPE: "queue", + ATTR_MEDIA_CONTENT_ID: "Invalid", + }, + blocking=True, + ) + assert controller.player_play_queue.call_count == 0 + + async def test_browse_media_root( hass: HomeAssistant, config_entry: MockConfigEntry, @@ -1696,3 +1745,42 @@ async def 
test_media_player_group_fails_wrong_integration( blocking=True, ) controller.set_group.assert_not_called() + + +async def test_get_queue( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + queue: list[QueueItem], + snapshot: SnapshotAssertion, +) -> None: + """Test the get queue service.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + controller.player_get_queue.return_value = queue + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_QUEUE, + { + ATTR_ENTITY_ID: "media_player.test_player", + }, + blocking=True, + return_response=True, + ) + controller.player_get_queue.assert_called_once_with(1, None, None) + assert response == snapshot + + +async def test_remove_from_queue( + hass: HomeAssistant, config_entry: MockConfigEntry, controller: MockHeos +) -> None: + """Test the remove from queue service.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.services.async_call( + DOMAIN, + SERVICE_REMOVE_FROM_QUEUE, + {ATTR_ENTITY_ID: "media_player.test_player", ATTR_QUEUE_IDS: [1, "2"]}, + blocking=True, + ) + controller.player_remove_from_queue.assert_called_once_with(1, [1, 2]) diff --git a/tests/components/home_connect/test_binary_sensor.py b/tests/components/home_connect/test_binary_sensor.py index 31c15ec00cf..ce879a38de5 100644 --- a/tests/components/home_connect/test_binary_sensor.py +++ b/tests/components/home_connect/test_binary_sensor.py @@ -1,6 +1,7 @@ """Tests for home_connect binary_sensor entities.""" from collections.abc import Awaitable, Callable +from http import HTTPStatus from unittest.mock import AsyncMock, MagicMock from aiohomeconnect.model import ( @@ -39,6 +40,7 @@ import homeassistant.helpers.issue_registry as ir from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator @pytest.fixture @@ -165,6 +167,7 @@ async def test_connected_devices( assert len(new_entity_entries) > len(entity_entries) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_binary_sensors_entity_availability( hass: HomeAssistant, @@ -219,6 +222,7 @@ async def test_binary_sensors_entity_availability( assert state.state != STATE_UNAVAILABLE +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize("appliance", ["Washer"], indirect=True) @pytest.mark.parametrize( ("value", "expected"), @@ -402,7 +406,7 @@ async def test_connected_sensor_functionality( @pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_create_issue( +async def test_create_door_binary_sensor_deprecation_issue( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -410,7 +414,7 @@ async def test_create_issue( client: MagicMock, issue_registry: ir.IssueRegistry, ) -> None: - """Test we create an issue when an automation or script is using a deprecated entity.""" + """Test that we create an issue when an automation or script is using a door binary sensor entity.""" entity_id = "binary_sensor.washer_door" issue_id = f"deprecated_binary_common_door_sensor_{entity_id}" @@ -464,3 +468,76 @@ async def test_create_issue( # Assert the issue is no longer present assert not issue_registry.async_get_issue(DOMAIN, issue_id) assert len(issue_registry.issues) == 0 + + 
+@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_door_binary_sensor_deprecation_issue_fix( + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + issue_registry: ir.IssueRegistry, + hass_client: ClientSessionGenerator, +) -> None: + """Test we can fix the issue created when an automation or script is using a door binary sensor entity.""" + entity_id = "binary_sensor.washer_door" + issue_id = f"deprecated_binary_common_door_sensor_{entity_id}" + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "condition": "state", + "entity_id": entity_id, + "state": "on", + }, + ], + } + } + }, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, issue_id) + assert issue + + _client = await hass_client() + resp = await _client.post( + "/api/repairs/issues/fix", + json={"handler": DOMAIN, "issue_id": issue.issue_id}, + ) + assert resp.status == HTTPStatus.OK + flow_id = (await resp.json())["flow_id"] + resp = await _client.post(f"/api/repairs/issues/fix/{flow_id}") + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/home_connect/test_coordinator.py b/tests/components/home_connect/test_coordinator.py index 050758a6568..e6a3390b284 100644 --- a/tests/components/home_connect/test_coordinator.py +++ b/tests/components/home_connect/test_coordinator.py @@ -287,7 +287,7 @@ async def test_event_listener( assert config_entry.state == ConfigEntryState.LOADED state = hass.states.get(entity_id) - assert state + event_message = EventMessage( appliance.ha_id, event_type, @@ -309,7 +309,8 @@ async def test_event_listener( new_state = hass.states.get(entity_id) assert new_state - assert new_state.state != state.state + if state is not None: + assert new_state.state != state.state # Following, we are gonna check that the listeners are clean up correctly new_entity_id = entity_id + "_new" diff --git a/tests/components/home_connect/test_sensor.py b/tests/components/home_connect/test_sensor.py index f30723af7fa..e2f3761dcd9 100644 --- a/tests/components/home_connect/test_sensor.py +++ b/tests/components/home_connect/test_sensor.py @@ -1,6 +1,7 @@ """Tests for home_connect sensor entities.""" from collections.abc import Awaitable, Callable +import logging from unittest.mock import AsyncMock, MagicMock from aiohomeconnect.model import ( @@ -153,6 +154,29 @@ async def test_paired_depaired_devices_flow( for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) + await client.add_events( + [ + EventMessage( + appliance.ha_id, + EventType.EVENT, + ArrayOfEvents( + [ + Event( + key=EventKey.LAUNDRY_CARE_WASHER_EVENT_I_DOS_1_FILL_LEVEL_POOR, 
raw_key=EventKey.LAUNDRY_CARE_WASHER_EVENT_I_DOS_1_FILL_LEVEL_POOR.value, + timestamp=0, + level="", + handling="", + value=BSH_EVENT_PRESENT_STATE_PRESENT, + ) + ], + ), + ), + ] + ) + await hass.async_block_till_done() + assert hass.states.is_state("sensor.washer_poor_i_dos_1_fill_level", "present") + @pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_connected_devices( @@ -224,6 +248,28 @@ async def test_sensor_entity_availability( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED + await client.add_events( + [ + EventMessage( + appliance.ha_id, + EventType.EVENT, + ArrayOfEvents( + [ + Event( + key=EventKey.DISHCARE_DISHWASHER_EVENT_SALT_NEARLY_EMPTY, + raw_key=EventKey.DISHCARE_DISHWASHER_EVENT_SALT_NEARLY_EMPTY.value, + timestamp=0, + level="", + handling="", + value=BSH_EVENT_PRESENT_STATE_OFF, + ) + ], + ), + ), + ] + ) + await hass.async_block_till_done() + for entity_id in entity_ids: state = hass.states.get(entity_id) assert state @@ -509,143 +555,148 @@ async def test_remaining_prog_time_edge_cases( ( "entity_id", "event_key", - "event_type", - "event_value_update", - "expected", + "value_expected_state", "appliance", ), [ ( "sensor.dishwasher_door", EventKey.BSH_COMMON_STATUS_DOOR_STATE, - EventType.STATUS, - BSH_DOOR_STATE_LOCKED, - "locked", + [ + ( + BSH_DOOR_STATE_LOCKED, + "locked", + ), + ( + BSH_DOOR_STATE_CLOSED, + "closed", + ), + ( + BSH_DOOR_STATE_OPEN, + "open", + ), + ], "Dishwasher", ), - ( - "sensor.dishwasher_door", - EventKey.BSH_COMMON_STATUS_DOOR_STATE, - EventType.STATUS, - BSH_DOOR_STATE_CLOSED, - "closed", - "Dishwasher", - ), - ( - "sensor.dishwasher_door", - EventKey.BSH_COMMON_STATUS_DOOR_STATE, - EventType.STATUS, - BSH_DOOR_STATE_OPEN, - "open", - "Dishwasher", - ), - ( - "sensor.fridgefreezer_freezer_door_alarm", - "EVENT_NOT_IN_STATUS_YET_SO_SET_TO_OFF", - EventType.EVENT, - "", - "off", - "FridgeFreezer", - ), - ( - "sensor.fridgefreezer_freezer_door_alarm", - EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_FREEZER, - EventType.EVENT, - BSH_EVENT_PRESENT_STATE_OFF, - "off", - "FridgeFreezer", - ), - ( - "sensor.fridgefreezer_freezer_door_alarm", - EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_FREEZER, - EventType.EVENT, - BSH_EVENT_PRESENT_STATE_PRESENT, - "present", - "FridgeFreezer", - ), - ( - "sensor.fridgefreezer_freezer_door_alarm", - EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_FREEZER, - EventType.EVENT, - BSH_EVENT_PRESENT_STATE_CONFIRMED, - "confirmed", - "FridgeFreezer", - ), - ( - "sensor.coffeemaker_bean_container_empty", - EventType.EVENT, - "EVENT_NOT_IN_STATUS_YET_SO_SET_TO_OFF", - "", - "off", - "CoffeeMaker", - ), - ( - "sensor.coffeemaker_bean_container_empty", - EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_BEAN_CONTAINER_EMPTY, - EventType.EVENT, - BSH_EVENT_PRESENT_STATE_OFF, - "off", - "CoffeeMaker", - ), - ( - "sensor.coffeemaker_bean_container_empty", - EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_BEAN_CONTAINER_EMPTY, - EventType.EVENT, - BSH_EVENT_PRESENT_STATE_PRESENT, - "present", - "CoffeeMaker", - ), - ( - "sensor.coffeemaker_bean_container_empty", - EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_BEAN_CONTAINER_EMPTY, - EventType.EVENT, - BSH_EVENT_PRESENT_STATE_CONFIRMED, - "confirmed", - "CoffeeMaker", - ), ], indirect=["appliance"], ) async def test_sensors_states( entity_id: str, event_key: EventKey, - event_type: EventType, - event_value_update: str, + value_expected_state: list[tuple[str, str]], appliance: 
HomeAppliance, - expected: str, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, ) -> None: - """Tests for appliance alarm sensors.""" + """Tests for appliance sensors.""" assert config_entry.state == ConfigEntryState.NOT_LOADED assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - await client.add_events( - [ - EventMessage( - appliance.ha_id, - event_type, - ArrayOfEvents( - [ - Event( - key=event_key, - raw_key=str(event_key), - timestamp=0, - level="", - handling="", - value=event_value_update, - ) - ], + for value, expected_state in value_expected_state: + await client.add_events( + [ + EventMessage( + appliance.ha_id, + EventType.STATUS, + ArrayOfEvents( + [ + Event( + key=event_key, + raw_key=str(event_key), + timestamp=0, + level="", + handling="", + value=value, + ) + ], + ), ), - ), - ] - ) - await hass.async_block_till_done() - assert hass.states.is_state(entity_id, expected) + ] + ) + await hass.async_block_till_done() + assert hass.states.is_state(entity_id, expected_state) + + +@pytest.mark.parametrize( + ( + "entity_id", + "event_key", + "appliance", + ), + [ + ( + "sensor.fridgefreezer_freezer_door_alarm", + EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_FREEZER, + "FridgeFreezer", + ), + ( + "sensor.coffeemaker_bean_container_empty", + EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_BEAN_CONTAINER_EMPTY, + "CoffeeMaker", + ), + ], + indirect=["appliance"], +) +async def test_event_sensors_states( + entity_id: str, + event_key: EventKey, + appliance: HomeAppliance, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + entity_registry: er.EntityRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Tests for appliance event sensors.""" + caplog.set_level(logging.ERROR) + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + assert not hass.states.get(entity_id) + + for value, expected_state in ( + (BSH_EVENT_PRESENT_STATE_OFF, "off"), + (BSH_EVENT_PRESENT_STATE_PRESENT, "present"), + (BSH_EVENT_PRESENT_STATE_CONFIRMED, "confirmed"), + ): + await client.add_events( + [ + EventMessage( + appliance.ha_id, + EventType.EVENT, + ArrayOfEvents( + [ + Event( + key=event_key, + raw_key=str(event_key), + timestamp=0, + level="", + handling="", + value=value, + ) + ], + ), + ), + ] + ) + await hass.async_block_till_done() + assert hass.states.is_state(entity_id, expected_state) + + # Verify that the integration doesn't attempt to add the event sensors more than once + # If that happens, the EntityPlatform logs an error with the entity's unique ID. 
+ assert "exists" not in caplog.text + assert entity_id not in caplog.text + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry + assert entity_entry.unique_id not in caplog.text @pytest.mark.parametrize( diff --git a/tests/components/home_connect/test_switch.py b/tests/components/home_connect/test_switch.py index 2903c8ac718..01f9cad5d2e 100644 --- a/tests/components/home_connect/test_switch.py +++ b/tests/components/home_connect/test_switch.py @@ -1,6 +1,7 @@ """Tests for home_connect sensor entities.""" from collections.abc import Awaitable, Callable +from http import HTTPStatus from typing import Any from unittest.mock import AsyncMock, MagicMock @@ -59,6 +60,7 @@ from homeassistant.helpers import ( from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator @pytest.fixture @@ -209,6 +211,7 @@ async def test_connected_devices( assert len(new_entity_entries) > len(entity_entries) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize("appliance", ["Dishwasher"], indirect=True) async def test_switch_entity_availability( hass: HomeAssistant, @@ -320,6 +323,7 @@ async def test_switch_functionality( assert hass.states.is_state(entity_id, state) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize( ("entity_id", "program_key", "initial_state", "appliance"), [ @@ -397,6 +401,7 @@ async def test_program_switch_functionality( client.stop_program.assert_awaited_once_with(appliance.ha_id) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize( ( "entity_id", @@ -801,18 +806,24 @@ async def test_power_switch_service_validation_errors( @pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_create_issue( +@pytest.mark.parametrize( + "service", + [SERVICE_TURN_ON, SERVICE_TURN_OFF], +) +async def test_create_program_switch_deprecation_issue( hass: HomeAssistant, appliance: HomeAppliance, + service: str, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, issue_registry: ir.IssueRegistry, ) -> None: - """Test we create an issue when an automation or script is using a deprecated entity.""" + """Test that we create an issue when an automation or script is using a program switch entity or the entity is used by the user.""" entity_id = "switch.washer_program_mix" - issue_id = f"deprecated_program_switch_{entity_id}" + automation_script_issue_id = f"deprecated_program_switch_{entity_id}" + action_handler_issue_id = f"deprecated_program_switch_{entity_id}" assert await async_setup_component( hass, @@ -851,17 +862,118 @@ async def test_create_issue( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED + await hass.services.async_call( + SWITCH_DOMAIN, + service, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + assert automations_with_entity(hass, entity_id)[0] == "automation.test" assert scripts_with_entity(hass, entity_id)[0] == "script.test" - assert len(issue_registry.issues) == 1 - assert issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 2 + assert issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() # Assert the issue is no 
longer present - assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert not issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert not issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) + assert len(issue_registry.issues) == 0 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + "service", + [SERVICE_TURN_ON, SERVICE_TURN_OFF], +) +async def test_program_switch_deprecation_issue_fix( + hass: HomeAssistant, + appliance: HomeAppliance, + service: str, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + issue_registry: ir.IssueRegistry, + hass_client: ClientSessionGenerator, +) -> None: + """Test we can fix the issues created when a program switch entity is in an automation or in a script or when is used.""" + entity_id = "switch.washer_program_mix" + automation_script_issue_id = f"deprecated_program_switch_{entity_id}" + action_handler_issue_id = f"deprecated_program_switch_{entity_id}" + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "action": "switch.turn_on", + "entity_id": entity_id, + }, + ], + } + } + }, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + await hass.services.async_call( + SWITCH_DOMAIN, + service, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 2 + assert issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) + + for issue in issue_registry.issues.copy().values(): + _client = await hass_client() + resp = await _client.post( + "/api/repairs/issues/fix", + json={"handler": DOMAIN, "issue_id": issue.issue_id}, + ) + assert resp.status == HTTPStatus.OK + flow_id = (await resp.json())["flow_id"] + resp = await _client.post(f"/api/repairs/issues/fix/{flow_id}") + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert not issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) assert len(issue_registry.issues) == 0 diff --git a/tests/components/home_connect/test_time.py b/tests/components/home_connect/test_time.py index 6be23460cac..8c23a09053a 100644 --- a/tests/components/home_connect/test_time.py +++ b/tests/components/home_connect/test_time.py @@ -2,6 +2,7 @@ from collections.abc import Awaitable, Callable from datetime import time +from http import HTTPStatus from unittest.mock import AsyncMock, MagicMock from aiohomeconnect.model import ( @@ -16,15 +17,26 @@ from aiohomeconnect.model import ( from aiohomeconnect.model.error import HomeConnectApiError, HomeConnectError import pytest +from homeassistant.components.automation import ( + DOMAIN as AUTOMATION_DOMAIN, + automations_with_entity, +) from homeassistant.components.home_connect.const import DOMAIN +from 
homeassistant.components.script import DOMAIN as SCRIPT_DOMAIN, scripts_with_entity from homeassistant.components.time import DOMAIN as TIME_DOMAIN, SERVICE_SET_VALUE from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, ATTR_TIME, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, +) +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator @pytest.fixture @@ -45,6 +57,7 @@ async def test_time( assert config_entry.state is ConfigEntryState.LOADED +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize("appliance", ["Oven"], indirect=True) async def test_paired_depaired_devices_flow( appliance: HomeAppliance, @@ -99,6 +112,7 @@ async def test_paired_depaired_devices_flow( assert entity_registry.async_get(entity_entry.entity_id) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize("appliance", ["Oven"], indirect=True) async def test_connected_devices( appliance: HomeAppliance, @@ -151,6 +165,7 @@ async def test_connected_devices( assert len(new_entity_entries) > len(entity_entries) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize("appliance", ["Oven"], indirect=True) async def test_time_entity_availability( hass: HomeAssistant, @@ -204,6 +219,7 @@ async def test_time_entity_availability( assert state.state != STATE_UNAVAILABLE +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize("appliance", ["Oven"], indirect=True) @pytest.mark.parametrize( ("entity_id", "setting_key"), @@ -248,6 +264,7 @@ async def test_time_entity_functionality( assert hass.states.is_state(entity_id, str(time(second=value))) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize( ("entity_id", "setting_key", "mock_attr"), [ @@ -299,3 +316,170 @@ async def test_time_entity_error( blocking=True, ) assert getattr(client_with_exception, mock_attr).call_count == 2 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) +async def test_create_alarm_clock_deprecation_issue( + hass: HomeAssistant, + appliance: HomeAppliance, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Test that we create an issue when an automation or script is using a alarm clock time entity or the entity is used by the user.""" + entity_id = f"{TIME_DOMAIN}.oven_alarm_clock" + automation_script_issue_id = ( + f"deprecated_time_alarm_clock_in_automations_scripts_{entity_id}" + ) + action_handler_issue_id = f"deprecated_time_alarm_clock_{entity_id}" + + assert await async_setup_component( + hass, + AUTOMATION_DOMAIN, + { + AUTOMATION_DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + SCRIPT_DOMAIN, + { + SCRIPT_DOMAIN: { + "test": { + "sequence": [ + { + 
"action": "switch.turn_on", + "entity_id": entity_id, + }, + ], + } + } + }, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + await hass.services.async_call( + TIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_TIME: time(minute=1), + }, + blocking=True, + ) + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 2 + assert issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert not issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) + assert len(issue_registry.issues) == 0 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) +async def test_alarm_clock_deprecation_issue_fix( + hass: HomeAssistant, + appliance: HomeAppliance, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + issue_registry: ir.IssueRegistry, + hass_client: ClientSessionGenerator, +) -> None: + """Test we can fix the issues created when a alarm clock time entity is in an automation or in a script or when is used.""" + entity_id = f"{TIME_DOMAIN}.oven_alarm_clock" + automation_script_issue_id = ( + f"deprecated_time_alarm_clock_in_automations_scripts_{entity_id}" + ) + action_handler_issue_id = f"deprecated_time_alarm_clock_{entity_id}" + + assert await async_setup_component( + hass, + AUTOMATION_DOMAIN, + { + AUTOMATION_DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + SCRIPT_DOMAIN, + { + SCRIPT_DOMAIN: { + "test": { + "sequence": [ + { + "action": "switch.turn_on", + "entity_id": entity_id, + }, + ], + } + } + }, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + await hass.services.async_call( + TIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_TIME: time(minute=1), + }, + blocking=True, + ) + + assert len(issue_registry.issues) == 2 + assert issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) + + for issue in issue_registry.issues.copy().values(): + _client = await hass_client() + resp = await _client.post( + "/api/repairs/issues/fix", + json={"handler": DOMAIN, "issue_id": issue.issue_id}, + ) + assert resp.status == HTTPStatus.OK + flow_id = (await resp.json())["flow_id"] + resp = await _client.post(f"/api/repairs/issues/fix/{flow_id}") + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert not issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) + assert len(issue_registry.issues) == 0 diff --git 
a/tests/components/homeassistant_sky_connect/test_update.py b/tests/components/homeassistant_sky_connect/test_update.py index 9fb7528987e..b6c7291e0af 100644 --- a/tests/components/homeassistant_sky_connect/test_update.py +++ b/tests/components/homeassistant_sky_connect/test_update.py @@ -1,5 +1,7 @@ """Test SkyConnect firmware update entity.""" +import pytest + from homeassistant.components.homeassistant_hardware.helpers import ( async_notify_firmware_info, ) @@ -14,9 +16,7 @@ from .common import USB_DATA_ZBT1 from tests.common import MockConfigEntry -UPDATE_ENTITY_ID = ( - "update.homeassistant_sky_connect_9e2adbd75b8beb119fe564a0f320645d_firmware" -) +UPDATE_ENTITY_ID = "update.home_assistant_connect_zbt_1_9e2adbd7_firmware" async def test_zbt1_update_entity(hass: HomeAssistant) -> None: @@ -59,8 +59,9 @@ async def test_zbt1_update_entity(hass: HomeAssistant) -> None: await hass.async_block_till_done() state_ezsp = hass.states.get(UPDATE_ENTITY_ID) + assert state_ezsp is not None assert state_ezsp.state == "unknown" - assert state_ezsp.attributes["title"] == "EmberZNet" + assert state_ezsp.attributes["title"] == "EmberZNet Zigbee" assert state_ezsp.attributes["installed_version"] == "7.3.1.0" assert state_ezsp.attributes["latest_version"] is None @@ -80,7 +81,52 @@ async def test_zbt1_update_entity(hass: HomeAssistant) -> None: # After the firmware update, the entity has the new version and the correct state state_spinel = hass.states.get(UPDATE_ENTITY_ID) + assert state_spinel is not None assert state_spinel.state == "unknown" assert state_spinel.attributes["title"] == "OpenThread RCP" assert state_spinel.attributes["installed_version"] == "2.4.4.0" assert state_spinel.attributes["latest_version"] is None + + +@pytest.mark.parametrize( + ("firmware", "version", "expected"), + [ + ("ezsp", "7.3.1.0 build 0", "EmberZNet Zigbee 7.3.1.0"), + ("spinel", "SL-OPENTHREAD/2.4.4.0_GitHub-7074a43e4", "OpenThread RCP 2.4.4.0"), + ("bootloader", "2.4.2", "Gecko Bootloader 2.4.2"), + ("cpc", "4.3.2", "Multiprotocol 4.3.2"), + ("router", "1.2.3.4", "Unknown 1.2.3.4"), # Not supported but still shown + ], +) +async def test_zbt1_update_entity_state( + hass: HomeAssistant, firmware: str, version: str, expected: str +) -> None: + """Test the ZBT-1 firmware update entity with different firmware types.""" + await async_setup_component(hass, "homeassistant", {}) + + zbt1_config_entry = MockConfigEntry( + domain="homeassistant_sky_connect", + data={ + "firmware": firmware, + "firmware_version": version, + "device": USB_DATA_ZBT1.device, + "manufacturer": USB_DATA_ZBT1.manufacturer, + "pid": USB_DATA_ZBT1.pid, + "product": USB_DATA_ZBT1.description, + "serial_number": USB_DATA_ZBT1.serial_number, + "vid": USB_DATA_ZBT1.vid, + }, + version=1, + minor_version=3, + ) + zbt1_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(zbt1_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(UPDATE_ENTITY_ID) + assert state is not None + assert ( + f"{state.attributes['title']} {state.attributes['installed_version']}" + == expected + ) diff --git a/tests/components/homeassistant_yellow/test_update.py b/tests/components/homeassistant_yellow/test_update.py index 269ff2afc49..2cc7b51836c 100644 --- a/tests/components/homeassistant_yellow/test_update.py +++ b/tests/components/homeassistant_yellow/test_update.py @@ -2,6 +2,8 @@ from unittest.mock import patch +import pytest + from homeassistant.components.homeassistant_hardware.helpers import ( 
async_notify_firmware_info, ) @@ -15,7 +17,7 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -UPDATE_ENTITY_ID = "update.homeassistant_yellow_firmware" +UPDATE_ENTITY_ID = "update.home_assistant_yellow_firmware" async def test_yellow_update_entity(hass: HomeAssistant) -> None: @@ -24,6 +26,7 @@ async def test_yellow_update_entity(hass: HomeAssistant) -> None: # Set up the Yellow integration yellow_config_entry = MockConfigEntry( + title="Home Assistant Yellow", domain="homeassistant_yellow", data={ "firmware": "ezsp", @@ -62,8 +65,9 @@ async def test_yellow_update_entity(hass: HomeAssistant) -> None: await hass.async_block_till_done() state_ezsp = hass.states.get(UPDATE_ENTITY_ID) + assert state_ezsp is not None assert state_ezsp.state == "unknown" - assert state_ezsp.attributes["title"] == "EmberZNet" + assert state_ezsp.attributes["title"] == "EmberZNet Zigbee" assert state_ezsp.attributes["installed_version"] == "7.3.1.0" assert state_ezsp.attributes["latest_version"] is None @@ -83,7 +87,58 @@ async def test_yellow_update_entity(hass: HomeAssistant) -> None: # After the firmware update, the entity has the new version and the correct state state_spinel = hass.states.get(UPDATE_ENTITY_ID) + assert state_spinel is not None assert state_spinel.state == "unknown" assert state_spinel.attributes["title"] == "OpenThread RCP" assert state_spinel.attributes["installed_version"] == "2.4.4.0" assert state_spinel.attributes["latest_version"] is None + + +@pytest.mark.parametrize( + ("firmware", "version", "expected"), + [ + ("ezsp", "7.3.1.0 build 0", "EmberZNet Zigbee 7.3.1.0"), + ("spinel", "SL-OPENTHREAD/2.4.4.0_GitHub-7074a43e4", "OpenThread RCP 2.4.4.0"), + ("bootloader", "2.4.2", "Gecko Bootloader 2.4.2"), + ("cpc", "4.3.2", "Multiprotocol 4.3.2"), + ("router", "1.2.3.4", "Unknown 1.2.3.4"), # Not supported but still shown + ], +) +async def test_yellow_update_entity_state( + hass: HomeAssistant, firmware: str, version: str, expected: str +) -> None: + """Test the Yellow firmware update entity with different firmware types.""" + await async_setup_component(hass, "homeassistant", {}) + + # Set up the Yellow integration + yellow_config_entry = MockConfigEntry( + title="Home Assistant Yellow", + domain="homeassistant_yellow", + data={ + "firmware": firmware, + "firmware_version": version, + "device": RADIO_DEVICE, + }, + version=1, + minor_version=3, + ) + yellow_config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.homeassistant_yellow.is_hassio", return_value=True + ), + patch( + "homeassistant.components.homeassistant_yellow.get_os_info", + return_value={"board": "yellow"}, + ), + ): + assert await hass.config_entries.async_setup(yellow_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(UPDATE_ENTITY_ID) + assert state is not None + assert ( + f"{state.attributes['title']} {state.attributes['installed_version']}" + == expected + ) diff --git a/tests/components/homee/fixtures/lock.json b/tests/components/homee/fixtures/lock.json new file mode 100644 index 00000000000..79fd53e0311 --- /dev/null +++ b/tests/components/homee/fixtures/lock.json @@ -0,0 +1,52 @@ +{ + "id": 1, + "name": "Test Lock", + "profile": 2007, + "image": "default", + "favorite": 0, + "order": 31, + "protocol": 1, + "routing": 0, + "state": 1, + "state_changed": 1711799526, + "added": 1645036891, + "history": 1, + "cube_type": 1, + "note": "", + "services": 3, + "phonetic_name": "", + "owner": 2, + "security": 0, + 
"attributes": [ + { + "id": 1, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 1.0, + "unit": "", + "step_value": 1.0, + "editable": 1, + "type": 232, + "state": 1, + "last_changed": 1711897362, + "changed_by": 4, + "changed_by_id": 5, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["toggle"], + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + } + ] +} diff --git a/tests/components/homee/snapshots/test_lock.ambr b/tests/components/homee/snapshots/test_lock.ambr new file mode 100644 index 00000000000..d055039cca4 --- /dev/null +++ b/tests/components/homee/snapshots/test_lock.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_lock_snapshot[lock.test_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'lock', + 'entity_category': None, + 'entity_id': 'lock.test_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00055511EECC-1-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_lock_snapshot[lock.test_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'changed_by': 'unknown-5', + 'friendly_name': 'Test Lock', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lock.test_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unlocked', + }) +# --- diff --git a/tests/components/homee/test_lock.py b/tests/components/homee/test_lock.py new file mode 100644 index 00000000000..3e6ff3f8ec6 --- /dev/null +++ b/tests/components/homee/test_lock.py @@ -0,0 +1,125 @@ +"""Test Homee locks.""" + +from unittest.mock import MagicMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.lock import ( + DOMAIN as LOCK_DOMAIN, + SERVICE_LOCK, + SERVICE_UNLOCK, + LockState, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import build_mock_node, setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def setup_lock( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_homee: MagicMock +) -> None: + """Setups the integration lock tests.""" + mock_homee.nodes = [build_mock_node("lock.json")] + mock_homee.get_node_by_id.return_value = mock_homee.nodes[0] + await setup_integration(hass, mock_config_entry) + + +@pytest.mark.parametrize( + ("service", "target_value"), + [ + (SERVICE_LOCK, 1), + (SERVICE_UNLOCK, 0), + ], +) +async def test_lock_services( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homee: MagicMock, + service: str, + target_value: int, +) -> None: + """Test lock services.""" + await setup_lock(hass, mock_config_entry, mock_homee) + + await hass.services.async_call( + LOCK_DOMAIN, + service, + {ATTR_ENTITY_ID: "lock.test_lock"}, + ) + mock_homee.set_value.assert_called_once_with(1, 1, target_value) + + +@pytest.mark.parametrize( + ("target_value", "current_value", "expected"), + [ + (1.0, 1.0, LockState.LOCKED), + (0.0, 0.0, LockState.UNLOCKED), + (1.0, 0.0, LockState.LOCKING), + (0.0, 1.0, LockState.UNLOCKING), + ], +) +async def test_lock_state( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homee: MagicMock, + target_value: float, + current_value: float, + expected: LockState, +) -> None: + """Test lock state.""" + mock_homee.nodes = [build_mock_node("lock.json")] + mock_homee.get_node_by_id.return_value = mock_homee.nodes[0] + attribute = mock_homee.nodes[0].attributes[0] + attribute.target_value = target_value + attribute.current_value = current_value + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("lock.test_lock").state == expected + + +@pytest.mark.parametrize( + ("attr_changed_by", "changed_by_id", "expected"), + [ + (1, 0, "itself-0"), + (2, 1, "user-testuser"), + (3, 54, "homeegram-54"), + (6, 0, "ai-0"), + ], +) +async def test_lock_changed_by( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homee: MagicMock, + attr_changed_by: int, + changed_by_id: int, + expected: str, +) -> None: + """Test lock changed by entries.""" + mock_homee.nodes = [build_mock_node("lock.json")] + mock_homee.get_node_by_id.return_value = mock_homee.nodes[0] + mock_homee.get_user_by_id.return_value = MagicMock(username="testuser") + attribute = mock_homee.nodes[0].attributes[0] + attribute.changed_by = attr_changed_by + attribute.changed_by_id = changed_by_id + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("lock.test_lock").attributes["changed_by"] == expected + + +async def test_lock_snapshot( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homee: MagicMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the lock snapshots.""" + with patch("homeassistant.components.homee.PLATFORMS", [Platform.LOCK]): + await setup_lock(hass, mock_config_entry, mock_homee) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/hue/test_light_v2.py b/tests/components/hue/test_light_v2.py index c831d40d261..3d323d4d31c 100644 --- a/tests/components/hue/test_light_v2.py +++ b/tests/components/hue/test_light_v2.py @@ -42,8 +42,8 @@ async def test_lights( assert light_1.attributes["min_mireds"] == 153 assert light_1.attributes["max_mireds"] == 500 assert light_1.attributes["dynamics"] == "dynamic_palette" - assert 
light_1.attributes["effect_list"] == ["None", "candle", "fire"] - assert light_1.attributes["effect"] == "None" + assert light_1.attributes["effect_list"] == ["off", "candle", "fire"] + assert light_1.attributes["effect"] == "off" # test light which supports color temperature only light_2 = hass.states.get("light.hue_light_with_color_temperature_only") @@ -57,7 +57,7 @@ async def test_lights( assert light_2.attributes["min_mireds"] == 153 assert light_2.attributes["max_mireds"] == 454 assert light_2.attributes["dynamics"] == "none" - assert light_2.attributes["effect_list"] == ["None", "candle", "sunrise"] + assert light_2.attributes["effect_list"] == ["off", "candle", "sunrise"] # test light which supports color only light_3 = hass.states.get("light.hue_light_with_color_only") @@ -201,7 +201,7 @@ async def test_light_turn_on_service( await hass.services.async_call( "light", "turn_on", - {"entity_id": test_light_id, "effect": "None"}, + {"entity_id": test_light_id, "effect": "off"}, blocking=True, ) assert len(mock_bridge_v2.mock_requests) == 8 @@ -216,14 +216,14 @@ async def test_light_turn_on_service( await hass.async_block_till_done() test_light = hass.states.get(test_light_id) assert test_light is not None - assert test_light.attributes["effect"] == "None" + assert test_light.attributes["effect"] == "off" # test turn on with useless effect # it should send a effect in the request if the device has no effect active await hass.services.async_call( "light", "turn_on", - {"entity_id": test_light_id, "effect": "None"}, + {"entity_id": test_light_id, "effect": "off"}, blocking=True, ) assert len(mock_bridge_v2.mock_requests) == 9 diff --git a/tests/components/husqvarna_automower/fixtures/mower.json b/tests/components/husqvarna_automower/fixtures/mower.json index ee368bf6546..06e11ec1252 100644 --- a/tests/components/husqvarna_automower/fixtures/mower.json +++ b/tests/components/husqvarna_automower/fixtures/mower.json @@ -176,7 +176,7 @@ ], "statistics": { "cuttingBladeUsageTime": 123, - "downTime": 123, + "downTime": 3600, "numberOfChargingCycles": 1380, "numberOfCollisions": 11396, "totalChargingTime": 4334400, @@ -184,7 +184,7 @@ "totalDriveDistance": 1780272, "totalRunningTime": 4564800, "totalSearchingTime": 370800, - "upTime": 456 + "upTime": 7200 }, "stayOutZones": { "dirty": false, diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index 9d5004c8f6d..d5546b0d2af 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -95,7 +95,7 @@ }), 'statistics': dict({ 'cutting_blade_usage_time': 123, - 'downtime': 123, + 'downtime': 3600, 'number_of_charging_cycles': 1380, 'number_of_collisions': 11396, 'total_charging_time': 4334400, @@ -103,7 +103,7 @@ 'total_drive_distance': 1780272, 'total_running_time': 4564800, 'total_searching_time': 370800, - 'uptime': 456, + 'uptime': 7200, }), 'stay_out_zones': dict({ 'dirty': False, diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index 02a64718276..92320de6fdb 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ -106,6 +106,64 @@ 'state': '0.034', }) # --- +# name: test_sensor_snapshot[sensor.test_mower_1_downtime-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_1_downtime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Downtime', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'downtime', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_downtime', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_1_downtime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Test Mower 1 Downtime', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_mower_1_downtime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- # name: test_sensor_snapshot[sensor.test_mower_1_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1208,6 +1266,64 @@ 'state': '103.000', }) # --- +# name: test_sensor_snapshot[sensor.test_mower_1_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_1_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uptime', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_uptime', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_1_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Test Mower 1 Uptime', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_mower_1_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- # name: test_sensor_snapshot[sensor.test_mower_1_work_area-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/husqvarna_automower/test_sensor.py b/tests/components/husqvarna_automower/test_sensor.py index 08ed5251344..85d20178e73 100644 --- a/tests/components/husqvarna_automower/test_sensor.py +++ b/tests/components/husqvarna_automower/test_sensor.py @@ -110,6 +110,18 @@ async def test_work_area_sensor( state = hass.states.get("sensor.test_mower_1_work_area") assert state.state == "my_lawn" + # Test EPOS mower, which returns work_area_id = 0, when no + # work area is active and has no default work_area_id=0 + values[TEST_MOWER_ID].mower.work_area_id = 0 + del values[TEST_MOWER_ID].work_areas[0] + del 
values[TEST_MOWER_ID].work_area_dict[0] + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get("sensor.test_mower_1_work_area") + assert state.state == "no_work_area_active" + @pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize( diff --git a/tests/components/iometer/__init__.py b/tests/components/iometer/__init__.py index 9e48fb982b3..19fe2124f1f 100644 --- a/tests/components/iometer/__init__.py +++ b/tests/components/iometer/__init__.py @@ -1,13 +1,19 @@ """Tests for the IOmeter integration.""" +from unittest.mock import patch + +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" +async def setup_platform( + hass: HomeAssistant, config_entry: MockConfigEntry, platforms: list[Platform] +) -> MockConfigEntry: + """Fixture for setting up the IOmeter platform.""" config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + with patch("homeassistant.components.iometer.PLATFORMS", platforms): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/iometer/conftest.py b/tests/components/iometer/conftest.py index ee45021952e..f8139c7c64c 100644 --- a/tests/components/iometer/conftest.py +++ b/tests/components/iometer/conftest.py @@ -54,4 +54,5 @@ def mock_config_entry() -> MockConfigEntry: title="IOmeter-1ISK0000000000", data={CONF_HOST: "10.0.0.2"}, unique_id="658c2b34-2017-45f2-a12b-731235f8bb97", + entry_id="01JQ6G5395176MAAWKAAPEZHV6", ) diff --git a/tests/components/iometer/snapshots/test_binary_sensor.ambr b/tests/components/iometer/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..38aab735a14 --- /dev/null +++ b/tests/components/iometer/snapshots/test_binary_sensor.ambr @@ -0,0 +1,97 @@ +# serializer version: 1 +# name: test_binary_sensors[binary_sensor.iometer_1isk0000000000_core_attachment_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.iometer_1isk0000000000_core_attachment_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Core attachment status', + 'platform': 'iometer', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'attachment_status', + 'unique_id': '01JQ6G5395176MAAWKAAPEZHV6_attachment_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.iometer_1isk0000000000_core_attachment_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'IOmeter-1ISK0000000000 Core attachment status', + }), + 'context': , + 'entity_id': 'binary_sensor.iometer_1isk0000000000_core_attachment_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_binary_sensors[binary_sensor.iometer_1isk0000000000_core_bridge_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.iometer_1isk0000000000_core_bridge_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Core/Bridge connection status', + 'platform': 'iometer', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'connection_status', + 'unique_id': '01JQ6G5395176MAAWKAAPEZHV6_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.iometer_1isk0000000000_core_bridge_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'IOmeter-1ISK0000000000 Core/Bridge connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.iometer_1isk0000000000_core_bridge_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/iometer/test_binary_sensor.py b/tests/components/iometer/test_binary_sensor.py new file mode 100644 index 00000000000..e007084567e --- /dev/null +++ b/tests/components/iometer/test_binary_sensor.py @@ -0,0 +1,135 @@ +"""Test the IOmeter binary sensors.""" + +from datetime import timedelta +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_platform + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_binary_sensors( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_iometer_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test binary sensors.""" + await setup_platform(hass, mock_config_entry, [Platform.BINARY_SENSOR]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_connection_status_sensors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_iometer_client: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test connection status sensor.""" + await setup_platform(hass, mock_config_entry, [Platform.BINARY_SENSOR]) + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_bridge_connection_status" + ).state + == STATE_ON + ) + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_iometer_client.get_current_status.return_value.device.core.connection_status = "disconnected" + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_bridge_connection_status" + ).state + == STATE_OFF + ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_attachment_status_sensors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_iometer_client: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test connection status sensor.""" + await setup_platform(hass, mock_config_entry, [Platform.BINARY_SENSOR]) + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_attachment_status" + ).state + == STATE_ON + ) + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_iometer_client.get_current_status.return_value.device.core.attachment_status = "detached" + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_attachment_status" + ).state + == STATE_OFF + ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_attachment_status_sensors_unkown( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_iometer_client: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test connection status sensor.""" + await setup_platform(hass, mock_config_entry, [Platform.BINARY_SENSOR]) + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_attachment_status" + ).state + == STATE_ON + ) + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_iometer_client.get_current_status.return_value.device.core.attachment_status = None + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_attachment_status" + ).state + == STATE_UNKNOWN + ) diff --git a/tests/components/iometer/test_init.py b/tests/components/iometer/test_init.py index 22a20b50c60..9d8eadc5079 100644 
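Editor's note (not part of the patch): the new IOmeter binary sensor tests above all follow the same polling pattern — load only the binary_sensor platform through the reworked setup_platform helper, then advance the frozen clock and fire Home Assistant's time-changed event so the update coordinator polls the mocked client again. Below is a minimal sketch of that pattern, assuming the standard Home Assistant test fixtures used in this diff; the helper name advance_and_refresh is hypothetical and only restates what the tests do inline.

```python
# Hypothetical helper illustrating the refresh pattern used by the tests above.
from datetime import timedelta

from freezegun.api import FrozenDateTimeFactory

from homeassistant.core import HomeAssistant

from tests.common import async_fire_time_changed


async def advance_and_refresh(
    hass: HomeAssistant, freezer: FrozenDateTimeFactory, minutes: int = 1
) -> None:
    """Advance the frozen clock and let the coordinator poll the mocked client."""
    freezer.tick(timedelta(minutes=minutes))  # move the frozen wall clock forward
    async_fire_time_changed(hass)  # fire time-based listeners (the update coordinator)
    await hass.async_block_till_done()  # wait for the resulting state updates
```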
--- a/tests/components/iometer/test_init.py +++ b/tests/components/iometer/test_init.py @@ -6,10 +6,11 @@ from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory from homeassistant.components.iometer.const import DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from . import setup_integration +from . import setup_platform from tests.common import MockConfigEntry, async_fire_time_changed @@ -22,7 +23,8 @@ async def test_new_firmware_version( freezer: FrozenDateTimeFactory, ) -> None: """Test device registry integration.""" - await setup_integration(hass, mock_config_entry) + # await setup_integration(hass, mock_config_entry) + await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) device_entry = device_registry.async_get_device( identifiers={(DOMAIN, mock_config_entry.unique_id)} ) diff --git a/tests/components/jellyfin/fixtures/get-media-folders.json b/tests/components/jellyfin/fixtures/get-media-folders.json index ff87751a9da..f6b5c1e8d78 100644 --- a/tests/components/jellyfin/fixtures/get-media-folders.json +++ b/tests/components/jellyfin/fixtures/get-media-folders.json @@ -302,8 +302,6 @@ "Album": "string", "CollectionType": "tvshows", "DisplayOrder": "string", - "AlbumId": "21af9851-8e39-43a9-9c47-513d3b9e99fc", - "AlbumPrimaryImageTag": "string", "SeriesPrimaryImageTag": "string", "AlbumArtist": "string", "AlbumArtists": [ diff --git a/tests/components/jellyfin/fixtures/sessions.json b/tests/components/jellyfin/fixtures/sessions.json index 00a1f5265db..db2b691dff0 100644 --- a/tests/components/jellyfin/fixtures/sessions.json +++ b/tests/components/jellyfin/fixtures/sessions.json @@ -4346,6 +4346,7 @@ ], "Album": "ALBUM", "AlbumId": "ALBUM-UUID", + "AlbumPrimaryImageTag": "ALBUM-PRIMARY-IMAGE-TAG", "AlbumArtist": "Album Artist", "AlbumArtists": [ { "Name": "Album Artist", "Id": "9a65b2c222ddb34e51f5cae360fad3a1" } diff --git a/tests/components/jellyfin/fixtures/user-items-parent-id.json b/tests/components/jellyfin/fixtures/user-items-parent-id.json index 2e06c30894c..cd0232894bc 100644 --- a/tests/components/jellyfin/fixtures/user-items-parent-id.json +++ b/tests/components/jellyfin/fixtures/user-items-parent-id.json @@ -302,8 +302,6 @@ "Album": "string", "CollectionType": "string", "DisplayOrder": "string", - "AlbumId": "21af9851-8e39-43a9-9c47-513d3b9e99fc", - "AlbumPrimaryImageTag": "string", "SeriesPrimaryImageTag": "string", "AlbumArtist": "string", "AlbumArtists": [ diff --git a/tests/components/jellyfin/snapshots/test_diagnostics.ambr b/tests/components/jellyfin/snapshots/test_diagnostics.ambr index c992628f034..9d73ee6397c 100644 --- a/tests/components/jellyfin/snapshots/test_diagnostics.ambr +++ b/tests/components/jellyfin/snapshots/test_diagnostics.ambr @@ -1707,6 +1707,7 @@ }), ]), 'AlbumId': 'ALBUM-UUID', + 'AlbumPrimaryImageTag': 'ALBUM-PRIMARY-IMAGE-TAG', 'ArtistItems': list([ dict({ 'Id': '1d864900526d9a9513b489f1cc28f8ca', diff --git a/tests/components/jellyfin/test_media_player.py b/tests/components/jellyfin/test_media_player.py index 3263639a32f..c6f015e9bb4 100644 --- a/tests/components/jellyfin/test_media_player.py +++ b/tests/components/jellyfin/test_media_player.py @@ -27,6 +27,7 @@ from homeassistant.components.media_player import ( from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, + ATTR_ENTITY_PICTURE, ATTR_FRIENDLY_NAME, ATTR_ICON, ) @@ -124,6 +125,10 @@ async def test_media_player_music( assert 
state.attributes.get(ATTR_MEDIA_SERIES_TITLE) is None assert state.attributes.get(ATTR_MEDIA_SEASON) is None assert state.attributes.get(ATTR_MEDIA_EPISODE) is None + assert ( + state.attributes.get(ATTR_ENTITY_PICTURE) + == "http://localhost/Items/ALBUM-UUID/Images/Primary.jpg" + ) entry = entity_registry.async_get(state.entity_id) assert entry diff --git a/tests/components/jewish_calendar/test_service.py b/tests/components/jewish_calendar/test_service.py new file mode 100644 index 00000000000..9eb80e5e7f0 --- /dev/null +++ b/tests/components/jewish_calendar/test_service.py @@ -0,0 +1,55 @@ +"""Test jewish calendar service.""" + +import datetime as dt + +from hdate.translator import Language +import pytest + +from homeassistant.components.jewish_calendar.const import DOMAIN +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("test_date", "nusach", "language", "expected"), + [ + pytest.param(dt.date(2025, 3, 20), "sfarad", "he", "", id="no_blessing"), + pytest.param( + dt.date(2025, 5, 20), + "ashkenaz", + "he", + "היום שבעה ושלושים יום שהם חמישה שבועות ושני ימים בעומר", + id="ahskenaz-hebrew", + ), + pytest.param( + dt.date(2025, 5, 20), + "sfarad", + "en", + "Today is the thirty-seventh day, which are five weeks and two days of the Omer", + id="sefarad-english", + ), + ], +) +async def test_get_omer_blessing( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + test_date: dt.date, + nusach: str, + language: Language, + expected: str, +) -> None: + """Test get omer blessing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.services.async_call( + DOMAIN, + "count_omer", + {"date": test_date, "nusach": nusach, "language": language}, + blocking=True, + return_response=True, + ) + + assert result["message"] == expected diff --git a/tests/components/kitchen_sink/snapshots/test_init.ambr b/tests/components/kitchen_sink/snapshots/test_init.ambr index b91131eb2b0..fe22f19fb7a 100644 --- a/tests/components/kitchen_sink/snapshots/test_init.ambr +++ b/tests/components/kitchen_sink/snapshots/test_init.ambr @@ -48,5 +48,15 @@ 'type': 'no_state', }), ]), + 'sensor.statistics_issues_issue_5': list([ + dict({ + 'data': dict({ + 'metadata_mean_type': 1, + 'state_mean_type': 2, + 'statistic_id': 'sensor.statistics_issues_issue_5', + }), + 'type': 'mean_type_changed', + }), + ]), }) # --- diff --git a/tests/components/kitchen_sink/snapshots/test_sensor.ambr b/tests/components/kitchen_sink/snapshots/test_sensor.ambr index 7b433c40170..6cd9aa2e855 100644 --- a/tests/components/kitchen_sink/snapshots/test_sensor.ambr +++ b/tests/components/kitchen_sink/snapshots/test_sensor.ambr @@ -29,6 +29,20 @@ 'last_updated': , 'state': '1500', }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'wind_direction', + 'friendly_name': 'Statistics issues Issue 5', + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.statistics_issues_issue_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }), StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Statistics issues Issue 1', @@ -99,6 +113,20 @@ 'last_updated': , 'state': '1500', }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'wind_direction', + 'friendly_name': 'Statistics issues Issue 5', + 'state_class': , + 'unit_of_measurement': '°', + }), + 
'context': , + 'entity_id': 'sensor.statistics_issues_issue_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }), StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Sensor test', diff --git a/tests/components/kitchen_sink/test_init.py b/tests/components/kitchen_sink/test_init.py index 50518f89107..526801aecfa 100644 --- a/tests/components/kitchen_sink/test_init.py +++ b/tests/components/kitchen_sink/test_init.py @@ -11,6 +11,7 @@ import voluptuous as vol from homeassistant.components.kitchen_sink import DOMAIN from homeassistant.components.recorder import get_instance from homeassistant.components.recorder.statistics import ( + StatisticMeanType, async_add_external_statistics, get_last_statistics, list_statistic_ids, @@ -45,6 +46,7 @@ async def test_demo_statistics(hass: HomeAssistant) -> None: assert { "display_unit_of_measurement": "°C", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": "Outdoor temperature", "source": DOMAIN, @@ -55,6 +57,7 @@ async def test_demo_statistics(hass: HomeAssistant) -> None: assert { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Energy consumption 1", "source": DOMAIN, diff --git a/tests/components/matter/fixtures/nodes/generic_switch_multi.json b/tests/components/matter/fixtures/nodes/generic_switch_multi.json index 8923198c31e..4055c9dc336 100644 --- a/tests/components/matter/fixtures/nodes/generic_switch_multi.json +++ b/tests/components/matter/fixtures/nodes/generic_switch_multi.json @@ -72,7 +72,6 @@ "1/59/0": 2, "1/59/65533": 1, "1/59/1": 0, - "1/59/2": 2, "1/59/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "1/59/65532": 30, "1/59/65528": [], @@ -102,7 +101,7 @@ "2/59/0": 2, "2/59/65533": 1, "2/59/1": 0, - "2/59/2": 2, + "2/59/2": 4, "2/59/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "2/59/65532": 30, "2/59/65528": [], diff --git a/tests/components/matter/snapshots/test_event.ambr b/tests/components/matter/snapshots/test_event.ambr index b0ddfaed8bf..153f5751f14 100644 --- a/tests/components/matter/snapshots/test_event.ambr +++ b/tests/components/matter/snapshots/test_event.ambr @@ -132,6 +132,8 @@ 'event_types': list([ 'multi_press_1', 'multi_press_2', + 'multi_press_3', + 'multi_press_4', 'long_press', 'long_release', ]), @@ -172,6 +174,8 @@ 'event_types': list([ 'multi_press_1', 'multi_press_2', + 'multi_press_3', + 'multi_press_4', 'long_press', 'long_release', ]), diff --git a/tests/components/matter/snapshots/test_number.ambr b/tests/components/matter/snapshots/test_number.ambr index dc35f6f2a69..e1ee782cd3b 100644 --- a/tests/components/matter/snapshots/test_number.ambr +++ b/tests/components/matter/snapshots/test_number.ambr @@ -401,8 +401,8 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 25, - 'min': -25, + 'max': 50, + 'min': -50, 'mode': , 'step': 0.5, }), @@ -439,8 +439,8 @@ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', 'friendly_name': 'Eve Thermo Temperature offset', - 'max': 25, - 'min': -25, + 'max': 50, + 'min': -50, 'mode': , 'step': 0.5, 'unit_of_measurement': , @@ -483,7 +483,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Altitude above Sea Level', + 'original_name': 'Altitude above sea level', 'platform': 'matter', 'previous_unique_id': None, 'supported_features': 0, @@ -496,7 +496,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'distance', - 'friendly_name': 'Eve Weather Altitude above Sea 
Level', + 'friendly_name': 'Eve Weather Altitude above sea level', 'max': 9000, 'min': 0, 'mode': , diff --git a/tests/components/matter/snapshots/test_sensor.ambr b/tests/components/matter/snapshots/test_sensor.ambr index 9caa84bbf96..cb26f1d8e70 100644 --- a/tests/components/matter/snapshots/test_sensor.ambr +++ b/tests/components/matter/snapshots/test_sensor.ambr @@ -686,7 +686,7 @@ 'state': '20.0', }) # --- -# name: test_sensors[air_purifier][sensor.air_purifier_vocs-entry] +# name: test_sensors[air_purifier][sensor.air_purifier_volatile_organic_compounds_parts-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -701,7 +701,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.air_purifier_vocs', + 'entity_id': 'sensor.air_purifier_volatile_organic_compounds_parts', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -713,7 +713,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'VOCs', + 'original_name': 'Volatile organic compounds parts', 'platform': 'matter', 'previous_unique_id': None, 'supported_features': 0, @@ -722,16 +722,16 @@ 'unit_of_measurement': 'ppm', }) # --- -# name: test_sensors[air_purifier][sensor.air_purifier_vocs-state] +# name: test_sensors[air_purifier][sensor.air_purifier_volatile_organic_compounds_parts-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'volatile_organic_compounds_parts', - 'friendly_name': 'Air Purifier VOCs', + 'friendly_name': 'Air Purifier Volatile organic compounds parts', 'state_class': , 'unit_of_measurement': 'ppm', }), 'context': , - 'entity_id': 'sensor.air_purifier_vocs', + 'entity_id': 'sensor.air_purifier_volatile_organic_compounds_parts', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1167,7 +1167,7 @@ 'state': '20.08', }) # --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_vocs-entry] +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_volatile_organic_compounds_parts-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1182,7 +1182,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_vocs', + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_volatile_organic_compounds_parts', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1194,7 +1194,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'VOCs', + 'original_name': 'Volatile organic compounds parts', 'platform': 'matter', 'previous_unique_id': None, 'supported_features': 0, @@ -1203,16 +1203,16 @@ 'unit_of_measurement': 'ppm', }) # --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_vocs-state] +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_volatile_organic_compounds_parts-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'volatile_organic_compounds_parts', - 'friendly_name': 'lightfi-aq1-air-quality-sensor VOCs', + 'friendly_name': 'lightfi-aq1-air-quality-sensor Volatile organic compounds parts', 'state_class': , 'unit_of_measurement': 'ppm', }), 'context': , - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_vocs', + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_volatile_organic_compounds_parts', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/matter/test_event.py b/tests/components/matter/test_event.py index f3a318c4e8b..651c71a5dce 100644 --- 
a/tests/components/matter/test_event.py +++ b/tests/components/matter/test_event.py @@ -36,7 +36,7 @@ async def test_generic_switch_node( assert state assert state.state == "unknown" assert state.name == "Mock Generic Switch Button" - # check event_types from featuremap 30 + # check event_types from featuremap 14 (0b1110) assert state.attributes[ATTR_EVENT_TYPES] == [ "initial_press", "short_release", @@ -76,7 +76,7 @@ async def test_generic_switch_multi_node( assert state_button_1.state == "unknown" # name should be 'DeviceName Button (1)' due to the label set to just '1' assert state_button_1.name == "Mock Generic Switch Button (1)" - # check event_types from featuremap 14 + # check event_types from featuremap 30 (0b11110) and MultiPressMax unset (default 2) assert state_button_1.attributes[ATTR_EVENT_TYPES] == [ "multi_press_1", "multi_press_2", @@ -84,11 +84,20 @@ async def test_generic_switch_multi_node( "long_release", ] # check button 2 - state_button_1 = hass.states.get("event.mock_generic_switch_fancy_button") - assert state_button_1 - assert state_button_1.state == "unknown" + state_button_2 = hass.states.get("event.mock_generic_switch_fancy_button") + assert state_button_2 + assert state_button_2.state == "unknown" # name should be 'DeviceName Fancy Button' due to the label set to 'Fancy Button' - assert state_button_1.name == "Mock Generic Switch Fancy Button" + assert state_button_2.name == "Mock Generic Switch Fancy Button" + # check event_types from featuremap 30 (0b11110) and MultiPressMax 4 + assert state_button_2.attributes[ATTR_EVENT_TYPES] == [ + "multi_press_1", + "multi_press_2", + "multi_press_3", + "multi_press_4", + "long_press", + "long_release", + ] # trigger firing a multi press event await trigger_subscription_callback( diff --git a/tests/components/meteo_france/conftest.py b/tests/components/meteo_france/conftest.py index eb28ec0a838..82b220e331e 100644 --- a/tests/components/meteo_france/conftest.py +++ b/tests/components/meteo_france/conftest.py @@ -24,8 +24,8 @@ def patch_requests(): mock_data.get_rain.return_value = Rain( load_json_object_fixture("raw_rain.json", DOMAIN) ) - mock_data.get_warning_current_phenomenoms.return_value = CurrentPhenomenons( - load_json_object_fixture("raw_warning_current_phenomenoms.json", DOMAIN) + mock_data.get_warning_current_phenomenons.return_value = CurrentPhenomenons( + load_json_object_fixture("raw_warning_current_phenomenons.json", DOMAIN) ) yield mock_data diff --git a/tests/components/meteo_france/fixtures/raw_warning_current_phenomenoms.json b/tests/components/meteo_france/fixtures/raw_warning_current_phenomenons.json similarity index 100% rename from tests/components/meteo_france/fixtures/raw_warning_current_phenomenoms.json rename to tests/components/meteo_france/fixtures/raw_warning_current_phenomenons.json diff --git a/tests/components/mqtt/common.py b/tests/components/mqtt/common.py index f000c4e0b9b..e4a368f0d71 100644 --- a/tests/components/mqtt/common.py +++ b/tests/components/mqtt/common.py @@ -70,9 +70,8 @@ MOCK_SUBENTRY_NOTIFY_COMPONENT1 = { "363a7ecad6be4a19b939a016ea93e994": { "platform": "notify", "name": "Milkman alert", - "qos": 0, "command_topic": "test-topic", - "command_template": "{{ value_json.value }}", + "command_template": "{{ value }}", "entity_picture": "https://example.com/363a7ecad6be4a19b939a016ea93e994", "retain": False, }, @@ -81,7 +80,6 @@ MOCK_SUBENTRY_NOTIFY_COMPONENT2 = { "6494827dac294fa0827c54b02459d309": { "platform": "notify", "name": "The second notifier", - "qos": 0, 
"command_topic": "test-topic2", "entity_picture": "https://example.com/6494827dac294fa0827c54b02459d309", }, @@ -89,14 +87,58 @@ MOCK_SUBENTRY_NOTIFY_COMPONENT2 = { MOCK_SUBENTRY_NOTIFY_COMPONENT_NO_NAME = { "5269352dd9534c908d22812ea5d714cd": { "platform": "notify", - "qos": 0, "command_topic": "test-topic", - "command_template": "{{ value_json.value }}", + "command_template": "{{ value }}", "entity_picture": "https://example.com/5269352dd9534c908d22812ea5d714cd", "retain": False, }, } +MOCK_SUBENTRY_SENSOR_COMPONENT = { + "e9261f6feed443e7b7d5f3fbe2a47412": { + "platform": "sensor", + "name": "Energy", + "device_class": "enum", + "state_topic": "test-topic", + "options": ["low", "medium", "high"], + "expire_after": 30, + "value_template": "{{ value_json.value }}", + "entity_picture": "https://example.com/e9261f6feed443e7b7d5f3fbe2a47412", + }, +} +MOCK_SUBENTRY_SENSOR_COMPONENT_STATE_CLASS = { + "a0f85790a95d4889924602effff06b6e": { + "platform": "sensor", + "name": "Energy", + "state_class": "measurement", + "state_topic": "test-topic", + "entity_picture": "https://example.com/a0f85790a95d4889924602effff06b6e", + }, +} +MOCK_SUBENTRY_SENSOR_COMPONENT_LAST_RESET = { + "e9261f6feed443e7b7d5f3fbe2a47412": { + "platform": "sensor", + "name": "Energy", + "state_class": "total", + "last_reset_value_template": "{{ value_json.value }}", + "state_topic": "test-topic", + "entity_picture": "https://example.com/e9261f6feed443e7b7d5f3fbe2a47412", + }, +} +MOCK_SUBENTRY_SWITCH_COMPONENT = { + "3faf1318016c46c5aea26707eeb6f12e": { + "platform": "switch", + "name": "Outlet", + "device_class": "outlet", + "command_topic": "test-topic", + "state_topic": "test-topic", + "command_template": "{{ value }}", + "value_template": "{{ value_json.value }}", + "entity_picture": "https://example.com/3faf1318016c46c5aea26707eeb6f12e", + "optimistic": True, + }, +} + # Bogus light component just for code coverage # Note that light cannot be setup through the UI yet # The test is for code coverage @@ -104,7 +146,6 @@ MOCK_SUBENTRY_LIGHT_COMPONENT = { "8131babc5e8d4f44b82e0761d39091a2": { "platform": "light", "name": "Test light", - "qos": 1, "command_topic": "test-topic4", "schema": "basic", "entity_picture": "https://example.com/8131babc5e8d4f44b82e0761d39091a2", @@ -114,7 +155,6 @@ MOCK_SUBENTRY_NOTIFY_BAD_SCHEMA = { "b10b531e15244425a74bb0abb1e9d2c6": { "platform": "notify", "name": "Test", - "qos": 1, "command_topic": "bad#topic", }, } @@ -148,10 +188,11 @@ MOCK_NOTIFY_SUBENTRY_DATA_SINGLE = { "model": "Model XL", "model_id": "mn002", "configuration_url": "https://example.com", + "mqtt_settings": {"qos": 1}, }, "components": MOCK_SUBENTRY_NOTIFY_COMPONENT1, } -MOCK_SUBENTRY_DATA_NOTIFY_NO_NAME = { +MOCK_NOTIFY_SUBENTRY_DATA_NO_NAME = { "device": { "name": "Milk notifier", "sw_version": "1.0", @@ -162,7 +203,50 @@ MOCK_SUBENTRY_DATA_NOTIFY_NO_NAME = { }, "components": MOCK_SUBENTRY_NOTIFY_COMPONENT_NO_NAME, } - +MOCK_SENSOR_SUBENTRY_DATA_SINGLE = { + "device": { + "name": "Test sensor", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_SENSOR_COMPONENT, +} +MOCK_SENSOR_SUBENTRY_DATA_SINGLE_STATE_CLASS = { + "device": { + "name": "Test sensor", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_SENSOR_COMPONENT_STATE_CLASS, +} 
+MOCK_SENSOR_SUBENTRY_DATA_SINGLE_LAST_RESET_TEMPLATE = { + "device": { + "name": "Test sensor", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_SENSOR_COMPONENT_LAST_RESET, +} +MOCK_SWITCH_SUBENTRY_DATA_SINGLE_STATE_CLASS = { + "device": { + "name": "Test switch", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_SWITCH_COMPONENT, +} MOCK_SUBENTRY_DATA_BAD_COMPONENT_SCHEMA = { "device": { "name": "Milk notifier", @@ -185,7 +269,8 @@ MOCK_SUBENTRY_DATA_SET_MIX = { }, "components": MOCK_SUBENTRY_NOTIFY_COMPONENT1 | MOCK_SUBENTRY_NOTIFY_COMPONENT2 - | MOCK_SUBENTRY_LIGHT_COMPONENT, + | MOCK_SUBENTRY_LIGHT_COMPONENT + | MOCK_SUBENTRY_SWITCH_COMPONENT, } | MOCK_SUBENTRY_AVAILABILITY_DATA _SENTINEL = object() diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index 354cb33ba39..c94d692b374 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -18,6 +18,7 @@ from homeassistant import config_entries from homeassistant.components import mqtt from homeassistant.components.hassio import AddonError from homeassistant.components.mqtt.config_flow import PWD_NOT_CHANGED +from homeassistant.components.mqtt.util import learn_more_url from homeassistant.config_entries import ConfigSubentry, ConfigSubentryData from homeassistant.const import ( CONF_CLIENT_ID, @@ -33,8 +34,12 @@ from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .common import ( MOCK_NOTIFY_SUBENTRY_DATA_MULTI, + MOCK_NOTIFY_SUBENTRY_DATA_NO_NAME, MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, - MOCK_SUBENTRY_DATA_NOTIFY_NO_NAME, + MOCK_SENSOR_SUBENTRY_DATA_SINGLE, + MOCK_SENSOR_SUBENTRY_DATA_SINGLE_LAST_RESET_TEMPLATE, + MOCK_SENSOR_SUBENTRY_DATA_SINGLE_STATE_CLASS, + MOCK_SWITCH_SUBENTRY_DATA_SINGLE_STATE_CLASS, ) from tests.common import MockConfigEntry, MockMqttReasonCode @@ -2612,54 +2617,176 @@ async def test_migrate_of_incompatible_config_entry( @pytest.mark.parametrize( ( "config_subentries_data", + "mock_device_user_input", "mock_entity_user_input", + "mock_entity_details_user_input", + "mock_entity_details_failed_user_input", "mock_mqtt_user_input", "mock_failed_mqtt_user_input", - "mock_failed_mqtt_user_input_errors", "entity_name", ), [ ( MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + {"name": "Milk notifier", "mqtt_settings": {"qos": 1}}, {"name": "Milkman alert"}, + None, + None, { "command_topic": "test-topic", - "command_template": "{{ value_json.value }}", - "qos": 0, + "command_template": "{{ value }}", "retain": False, }, - {"command_topic": "test-topic#invalid"}, - {"command_topic": "invalid_publish_topic"}, + ( + ( + {"command_topic": "test-topic#invalid"}, + {"command_topic": "invalid_publish_topic"}, + ), + ), "Milk notifier Milkman alert", ), ( - MOCK_SUBENTRY_DATA_NOTIFY_NO_NAME, + MOCK_NOTIFY_SUBENTRY_DATA_NO_NAME, + {"name": "Milk notifier", "mqtt_settings": {"qos": 0}}, {}, + None, + None, { "command_topic": "test-topic", - "command_template": "{{ value_json.value }}", - "qos": 0, + "command_template": "{{ value }}", "retain": False, }, - {"command_topic": "test-topic#invalid"}, - {"command_topic": "invalid_publish_topic"}, + ( + ( + {"command_topic": "test-topic#invalid"}, + {"command_topic": "invalid_publish_topic"}, + ), + ), "Milk notifier", ), + ( + 
MOCK_SENSOR_SUBENTRY_DATA_SINGLE, + {"name": "Test sensor", "mqtt_settings": {"qos": 0}}, + {"name": "Energy"}, + {"device_class": "enum", "options": ["low", "medium", "high"]}, + ( + ( + { + "device_class": "energy", + "unit_of_measurement": "ppm", + }, + {"unit_of_measurement": "invalid_uom"}, + ), + # Trigger options to be shown on the form + ( + {"device_class": "enum"}, + {"options": "options_with_enum_device_class"}, + ), + # Test options are only allowed with device_class enum + ( + { + "device_class": "energy", + "options": ["less", "more"], + }, + { + "device_class": "options_device_class_enum", + "unit_of_measurement": "uom_required_for_device_class", + }, + ), + # Include options again to allow flow with valid data + ( + {"device_class": "enum"}, + {"options": "options_with_enum_device_class"}, + ), + ( + { + "device_class": "enum", + "state_class": "measurement", + "options": ["less", "more"], + }, + {"options": "options_not_allowed_with_state_class_or_uom"}, + ), + ), + { + "state_topic": "test-topic", + "value_template": "{{ value_json.value }}", + "advanced_settings": {"expire_after": 30}, + }, + ( + ( + {"state_topic": "test-topic#invalid"}, + {"state_topic": "invalid_subscribe_topic"}, + ), + ), + "Test sensor Energy", + ), + ( + MOCK_SENSOR_SUBENTRY_DATA_SINGLE_STATE_CLASS, + {"name": "Test sensor", "mqtt_settings": {"qos": 0}}, + {"name": "Energy"}, + { + "state_class": "measurement", + }, + (), + { + "state_topic": "test-topic", + }, + (), + "Test sensor Energy", + ), + ( + MOCK_SWITCH_SUBENTRY_DATA_SINGLE_STATE_CLASS, + {"name": "Test switch", "mqtt_settings": {"qos": 0}}, + {"name": "Outlet"}, + {"device_class": "outlet"}, + (), + { + "command_topic": "test-topic", + "command_template": "{{ value }}", + "state_topic": "test-topic", + "value_template": "{{ value_json.value }}", + "optimistic": True, + }, + ( + ( + {"command_topic": "test-topic#invalid"}, + {"command_topic": "invalid_publish_topic"}, + ), + ( + { + "command_topic": "test-topic", + "state_topic": "test-topic#invalid", + }, + {"state_topic": "invalid_subscribe_topic"}, + ), + ), + "Test switch Outlet", + ), + ], + ids=[ + "notify_with_entity_name", + "notify_no_entity_name", + "sensor_options", + "sensor_total", + "switch", ], - ids=["notify_with_entity_name", "notify_no_entity_name"], ) async def test_subentry_configflow( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, config_subentries_data: dict[str, Any], + mock_device_user_input: dict[str, Any], mock_entity_user_input: dict[str, Any], + mock_entity_details_user_input: dict[str, Any], + mock_entity_details_failed_user_input: tuple[ + tuple[dict[str, Any], dict[str, str]], + ], mock_mqtt_user_input: dict[str, Any], - mock_failed_mqtt_user_input: dict[str, Any], - mock_failed_mqtt_user_input_errors: dict[str, Any], + mock_failed_mqtt_user_input: tuple[tuple[dict[str, Any], dict[str, str]],], entity_name: str, ) -> None: """Test the subentry ConfigFlow.""" - device_name = config_subentries_data["device"]["name"] + device_name = mock_device_user_input["name"] component = next(iter(config_subentries_data["components"].values())) await mqtt_mock_entry() @@ -2686,14 +2813,7 @@ async def test_subentry_configflow( result = await hass.config_entries.subentries.async_configure( result["flow_id"], - user_input={ - "name": device_name, - "sw_version": "1.0", - "hw_version": "2.1 rev a", - "model": "Model XL", - "model_id": "mn002", - "configuration_url": "https://example.com", - }, + user_input=mock_device_user_input, ) assert result["type"] is 
FlowResultType.FORM assert result["step_id"] == "entity" @@ -2723,23 +2843,55 @@ async def test_subentry_configflow( | mock_entity_user_input, ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "mqtt_platform_config" assert result["errors"] == {} assert result["description_placeholders"] == { - "mqtt_device": "Milk notifier", - "platform": "notify", + "mqtt_device": device_name, + "platform": component["platform"], "entity": entity_name, + "url": learn_more_url(component["platform"]), } - # Process entity platform config flow + # Process extra step if the platform supports it + if mock_entity_details_user_input is not None: + # Extra entity details flow step + assert result["step_id"] == "entity_platform_config" - # Test an invalid mqtt user_input case - result = await hass.config_entries.subentries.async_configure( - result["flow_id"], - user_input=mock_failed_mqtt_user_input, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == mock_failed_mqtt_user_input_errors + # First test validators if set of test + for failed_user_input, failed_errors in mock_entity_details_failed_user_input: + # Test an invalid entity details user input case + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=failed_user_input, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == failed_errors + + # Now try again with valid data + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=mock_entity_details_user_input, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + assert result["description_placeholders"] == { + "mqtt_device": device_name, + "platform": component["platform"], + "entity": entity_name, + "url": learn_more_url(component["platform"]), + } + else: + # No details form step + assert result["step_id"] == "mqtt_platform_config" + + # Process mqtt platform config flow + # Test an invalid mqtt user input case + for failed_user_input, failed_errors in mock_failed_mqtt_user_input: + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=failed_user_input, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == failed_errors # Try again with a valid configuration result = await hass.config_entries.subentries.async_configure( @@ -2756,6 +2908,10 @@ async def test_subentry_configflow( iter(config_subentries_data["components"].values()) ) + subentry_device_data = next(iter(config_entry.subentries.values())).data["device"] + for option, value in mock_device_user_input.items(): + assert subentry_device_data[option] == value + await hass.async_block_till_done() @@ -2799,8 +2955,12 @@ async def test_subentry_reconfigure_remove_entity( assert len(components) == 2 object_list = list(components) component_list = list(components.values()) - entity_name_0 = f"{device.name} {component_list[0]['name']}" - entity_name_1 = f"{device.name} {component_list[1]['name']}" + entity_name_0 = ( + f"{device.name} {component_list[0]['name']} ({component_list[0]['platform']})" + ) + entity_name_1 = ( + f"{device.name} {component_list[1]['name']} ({component_list[1]['platform']})" + ) for key, component in components.items(): unique_entity_id = f"{subentry_id}_{key}" @@ -2920,8 +3080,12 @@ async def test_subentry_reconfigure_edit_entity_multi_entitites( assert len(components) == 2 object_list = list(components) component_list = list(components.values()) - entity_name_0 = 
f"{device.name} {component_list[0]['name']}" - entity_name_1 = f"{device.name} {component_list[1]['name']}" + entity_name_0 = ( + f"{device.name} {component_list[0]['name']} ({component_list[0]['platform']})" + ) + entity_name_1 = ( + f"{device.name} {component_list[1]['name']} ({component_list[1]['platform']})" + ) for key in components: unique_entity_id = f"{subentry_id}_{key}" @@ -3000,7 +3164,13 @@ async def test_subentry_reconfigure_edit_entity_multi_entitites( @pytest.mark.parametrize( - ("mqtt_config_subentries_data", "user_input_mqtt"), + ( + "mqtt_config_subentries_data", + "user_input_platform_config_validation", + "user_input_platform_config", + "user_input_mqtt", + "removed_options", + ), [ ( ( @@ -3010,21 +3180,66 @@ async def test_subentry_reconfigure_edit_entity_multi_entitites( title="Mock subentry", ), ), + (), + None, { "command_topic": "test-topic1-updated", - "command_template": "{{ value_json.value }}", + "command_template": "{{ value }}", "retain": True, }, - ) + {"entity_picture"}, + ), + ( + ( + ConfigSubentryData( + data=MOCK_SENSOR_SUBENTRY_DATA_SINGLE, + subentry_type="device", + title="Mock subentry", + ), + ), + ( + ( + { + "device_class": "battery", + "options": [], + "state_class": "measurement", + "unit_of_measurement": "invalid", + }, + # Allow to accept options are being removed + { + "device_class": "options_device_class_enum", + "options": "options_not_allowed_with_state_class_or_uom", + "unit_of_measurement": "invalid_uom", + }, + ), + ), + { + "device_class": "battery", + "state_class": "measurement", + "unit_of_measurement": "%", + "advanced_settings": {"suggested_display_precision": 1}, + }, + { + "state_topic": "test-topic1-updated", + "value_template": "{{ value_json.value }}", + }, + {"options", "expire_after", "entity_picture"}, + ), ], - ids=["notify"], + ids=["notify", "sensor"], ) async def test_subentry_reconfigure_edit_entity_single_entity( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + user_input_platform_config_validation: tuple[ + tuple[dict[str, Any], dict[str, str] | None], ... 
+ ] + | None, + user_input_platform_config: dict[str, Any] | None, user_input_mqtt: dict[str, Any], + removed_options: tuple[str, ...], ) -> None: """Test the subentry ConfigFlow reconfigure with single entity.""" await mqtt_mock_entry() @@ -3081,7 +3296,28 @@ async def test_subentry_reconfigure_edit_entity_single_entity( user_input={}, ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "mqtt_platform_config" + + if user_input_platform_config is None: + # Skip entity flow step + assert result["step_id"] == "mqtt_platform_config" + else: + # Additional entity flow step + assert result["step_id"] == "entity_platform_config" + for entity_validation_config, errors in user_input_platform_config_validation: + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=entity_validation_config, + ) + assert result["step_id"] == "entity_platform_config" + assert result.get("errors") == errors + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_platform_config, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "mqtt_platform_config" # submit the new platform specific entity data, result = await hass.config_entries.subentries.async_configure( @@ -3110,6 +3346,142 @@ async def test_subentry_reconfigure_edit_entity_single_entity( for key, value in user_input_mqtt.items(): assert new_components[component_id][key] == value + assert set(component) - set(new_components[component_id]) == removed_options + + +@pytest.mark.parametrize( + ( + "mqtt_config_subentries_data", + "user_input_entity_details", + "user_input_mqtt", + "filtered_out_fields", + ), + [ + ( + ( + ConfigSubentryData( + data=MOCK_SENSOR_SUBENTRY_DATA_SINGLE_LAST_RESET_TEMPLATE, + subentry_type="device", + title="Mock subentry", + ), + ), + { + "state_class": "measurement", + }, + { + "state_topic": "test-topic", + }, + ("last_reset_value_template",), + ), + ], + ids=["sensor_last_reset_template"], +) +async def test_subentry_reconfigure_edit_entity_reset_fields( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + user_input_entity_details: dict[str, Any], + user_input_mqtt: dict[str, Any], + filtered_out_fields: tuple[str, ...], +) -> None: + """Test the subentry ConfigFlow reconfigure resets filtered out fields.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we have a device for the subentry + device = device_registry.async_get_device(identifiers={(mqtt.DOMAIN, subentry_id)}) + assert device is not None + + # assert we have an entity for the subentry component + components = deepcopy(dict(subentry.data))["components"] + assert len(components) == 1 + + component_id, component = next(iter(components.items())) + for field in filtered_out_fields: + assert field in component + + unique_entity_id = f"{subentry_id}_{component_id}" + entity_id = entity_registry.async_get_entity_id( + domain=component["platform"], platform=mqtt.DOMAIN, unique_id=unique_entity_id + ) + assert 
entity_id is not None + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + assert entity_entry.config_subentry_id == subentry_id + + # assert menu options, we do not have the option to delete an entity + # we have no option to save and finish yet + assert result["menu_options"] == [ + "entity", + "update_entity", + "device", + "availability", + ] + + # assert we can update the entity, there is no select step + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "update_entity"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity" + + # submit the new common entity data, reset entity_picture + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity_platform_config" + + # submit the new entity platform config + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_entity_details, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "mqtt_platform_config" + + # submit the new platform specific mqtt data, + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_mqtt, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check we still have out components + new_components = deepcopy(dict(subentry.data))["components"] + assert len(new_components) == 1 + + # Check our update was successful + assert "entity_picture" not in new_components[component_id] + + # Check the second component was updated + for key, value in user_input_mqtt.items(): + assert new_components[component_id][key] == value + + # Check field are filtered out correctly + for field in filtered_out_fields: + assert field not in new_components[component_id] + @pytest.mark.parametrize( ("mqtt_config_subentries_data", "user_input_entity", "user_input_mqtt"), @@ -3129,7 +3501,6 @@ async def test_subentry_reconfigure_edit_entity_single_entity( }, { "command_topic": "test-topic2", - "qos": 0, }, ) ], diff --git a/tests/components/mqtt/test_mixins.py b/tests/components/mqtt/test_mixins.py index 2049dec0437..fa30283962b 100644 --- a/tests/components/mqtt/test_mixins.py +++ b/tests/components/mqtt/test_mixins.py @@ -1,7 +1,7 @@ """The tests for shared code of the MQTT platform.""" from typing import Any -from unittest.mock import patch +from unittest.mock import call, patch import pytest @@ -21,7 +21,11 @@ from homeassistant.helpers import ( ) from homeassistant.util import slugify -from .common import MOCK_SUBENTRY_DATA_BAD_COMPONENT_SCHEMA, MOCK_SUBENTRY_DATA_SET_MIX +from .common import ( + MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + MOCK_SUBENTRY_DATA_BAD_COMPONENT_SCHEMA, + MOCK_SUBENTRY_DATA_SET_MIX, +) from tests.common import MockConfigEntry, async_capture_events, async_fire_mqtt_message from tests.typing import MqttMockHAClientGenerator @@ -547,3 +551,39 @@ async def test_loading_subentry_with_bad_component_schema( "Schema violation occurred when trying to set up entity from subentry" in caplog.text ) + + +@pytest.mark.parametrize( + 
"mqtt_config_subentries_data", + [ + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + subentry_type="device", + title="Mock subentry", + ), + ) + ], +) +async def test_qos_on_mqt_device_from_subentry( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_config_subentries_data: tuple[dict[str, Any]], + device_registry: dr.DeviceRegistry, +) -> None: + """Test QoS is set correctly on entities from MQTT device.""" + mqtt_mock = await mqtt_mock_entry() + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id = next(iter(entry.subentries)) + # Each subentry has one device + device = device_registry.async_get_device({("mqtt", subentry_id)}) + assert device is not None + assert hass.states.get("notify.milk_notifier_milkman_alert") is not None + await hass.services.async_call( + "notify", + "send_message", + {"entity_id": "notify.milk_notifier_milkman_alert", "message": "Test message"}, + ) + await hass.async_block_till_done() + assert len(mqtt_mock.async_publish.mock_calls) == 1 + mqtt_mock.async_publish.mock_calls[0] = call("test-topic", "Test message", 1, False) diff --git a/tests/components/music_assistant/fixtures/players.json b/tests/components/music_assistant/fixtures/players.json index 8a08a55dc45..e8978f17f86 100644 --- a/tests/components/music_assistant/fixtures/players.json +++ b/tests/components/music_assistant/fixtures/players.json @@ -34,12 +34,16 @@ "needs_poll": false, "poll_interval": 30, "enabled": true, - "hidden": false, "icon": "mdi-speaker", "group_volume": 20, "display_name": "Test Player 1", - "extra_data": {}, - "announcement_in_progress": false + "power_control": "native", + "volume_control": "native", + "mute_control": "native", + "hide_player_in_ui": ["when_unavailable"], + "expose_to_ha": true, + "can_group_with": ["00:00:00:00:00:02"], + "source_list": [] }, { "player_id": "00:00:00:00:00:02", @@ -83,15 +87,27 @@ }, "synced_to": null, "enabled_by_default": true, - "needs_poll": false, - "poll_interval": 30, "enabled": true, "hidden": false, "icon": "mdi-speaker", "group_volume": 20, "display_name": "My Super Test Player 2", - "extra_data": {}, - "announcement_in_progress": false + "power_control": "native", + "volume_control": "native", + "mute_control": "native", + "hide_player_in_ui": ["when_unavailable"], + "expose_to_ha": true, + "can_group_with": ["00:00:00:00:00:01"], + "source_list": [ + { + "id": "spotify", + "name": "Spotify Connect", + "passive": true, + "can_play_pause": false, + "can_seek": false, + "can_next_previous": false + } + ] }, { "player_id": "test_group_player_1", @@ -135,15 +151,17 @@ }, "synced_to": null, "enabled_by_default": true, - "needs_poll": true, - "poll_interval": 30, "enabled": true, - "hidden": false, "icon": "mdi-speaker-multiple", "group_volume": 6, "display_name": "Test Group Player 1", - "extra_data": {}, - "announcement_in_progress": false + "power_control": "native", + "volume_control": "native", + "mute_control": "native", + "hide_player_in_ui": ["when_unavailable"], + "expose_to_ha": true, + "can_group_with": [], + "source_list": [] } ] } diff --git a/tests/components/music_assistant/test_media_player.py b/tests/components/music_assistant/test_media_player.py index 44317d4977a..ad321a1cc29 100644 --- a/tests/components/music_assistant/test_media_player.py +++ b/tests/components/music_assistant/test_media_player.py @@ -694,19 +694,6 @@ async def test_media_player_supported_features( assert state assert state.attributes["supported_features"] == expected_features - # 
remove pause capability from player, trigger subscription callback - # and check if the supported features got updated - music_assistant_client.players._players[mass_player_id].supported_features.remove( - PlayerFeature.PAUSE - ) - await trigger_subscription_callback( - hass, music_assistant_client, EventType.PLAYER_CONFIG_UPDATED, mass_player_id - ) - expected_features &= ~MediaPlayerEntityFeature.PAUSE - state = hass.states.get(entity_id) - assert state - assert state.attributes["supported_features"] == expected_features - # remove grouping capability from player, trigger subscription callback # and check if the supported features got updated music_assistant_client.players._players[mass_player_id].supported_features.remove( diff --git a/tests/components/nest/conftest.py b/tests/components/nest/conftest.py index 92d90a18a7e..b4b94efce5b 100644 --- a/tests/components/nest/conftest.py +++ b/tests/components/nest/conftest.py @@ -144,13 +144,14 @@ async def auth( return FakeAuth(aioclient_mock, create_device, device_access_project_id) -@pytest.fixture(autouse=True) -def cleanup_media_storage(hass: HomeAssistant) -> Generator[None]: +@pytest.fixture(autouse=True, name="media_path") +def cleanup_media_storage(hass: HomeAssistant) -> Generator[str]: """Test cleanup, remove any media storage persisted during the test.""" tmp_path = str(uuid.uuid4()) with patch("homeassistant.components.nest.media_source.MEDIA_PATH", new=tmp_path): - yield - shutil.rmtree(hass.config.path(tmp_path), ignore_errors=True) + full_path = hass.config.path(tmp_path) + yield full_path + shutil.rmtree(full_path, ignore_errors=True) @pytest.fixture diff --git a/tests/components/nest/test_media_source.py b/tests/components/nest/test_media_source.py index d009e1185da..0b0654fc69c 100644 --- a/tests/components/nest/test_media_source.py +++ b/tests/components/nest/test_media_source.py @@ -8,11 +8,13 @@ from collections.abc import Generator import datetime from http import HTTPStatus import io +import pathlib from typing import Any from unittest.mock import patch import aiohttp import av +from freezegun import freeze_time import numpy as np import pytest @@ -39,7 +41,7 @@ from .common import ( ) from .conftest import FakeAuth -from tests.common import MockUser, async_capture_events +from tests.common import MockUser, async_capture_events, async_fire_time_changed from tests.typing import ClientSessionGenerator DOMAIN = "nest" @@ -1574,3 +1576,80 @@ async def test_event_clip_media_attachment( response = await client.get(content_path) assert response.status == HTTPStatus.OK, f"Response not matched: {response}" await response.read() + + +@pytest.mark.parametrize(("device_traits", "cache_size"), [(BATTERY_CAMERA_TRAITS, 5)]) +async def test_remove_stale_media( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + auth, + mp4, + hass_client: ClientSessionGenerator, + subscriber, + setup_platform, + media_path: str, +) -> None: + """Test media files getting evicted from the cache.""" + await setup_platform() + + device = device_registry.async_get_device(identifiers={(DOMAIN, DEVICE_ID)}) + assert device + assert device.name == DEVICE_NAME + + # Publish a media event + auth.responses = [ + aiohttp.web.Response(body=mp4.getvalue()), + ] + event_timestamp = dt_util.now() + await subscriber.async_receive_event( + create_event_message( + create_battery_event_data(MOTION_EVENT), + timestamp=event_timestamp, + ) + ) + await hass.async_block_till_done() + + # The first subdirectory is the device id. 
Media for events are stored in the + # device subdirectory. First verify that the media was persisted. We will + # then add additional media files, then invoke the garbage collector, and + # then verify orphaned files are removed. + storage_path = pathlib.Path(media_path) + device_path = storage_path / device.id + media_files = list(device_path.glob("*")) + assert len(media_files) == 1 + event_media = media_files[0] + assert event_media.name.endswith(".mp4") + + event_time1 = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=8) + extra_media1 = ( + device_path / f"{int(event_time1.timestamp())}-camera_motion-test.mp4" + ) + extra_media1.write_bytes(mp4.getvalue()) + event_time2 = event_time1 + datetime.timedelta(hours=20) + extra_media2 = ( + device_path / f"{int(event_time2.timestamp())}-camera_motion-test.jpg" + ) + extra_media2.write_bytes(mp4.getvalue()) + # This event will not be garbage collected because it is too recent + event_time3 = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=3) + extra_media3 = ( + device_path / f"{int(event_time3.timestamp())}-camera_motion-test.mp4" + ) + extra_media3.write_bytes(mp4.getvalue()) + + assert len(list(device_path.glob("*"))) == 4 + + # Advance the clock to invoke the garbage collector. This will remove extra + # files that are not valid events that are old enough. + point_in_time = datetime.datetime.now(datetime.UTC) + datetime.timedelta(days=1) + with freeze_time(point_in_time): + async_fire_time_changed(hass, point_in_time) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Verify that the event media is still present and that the extra files + # are removed. Newer media is not removed. + assert event_media.exists() + assert not extra_media1.exists() + assert not extra_media2.exists() + assert extra_media3.exists() diff --git a/tests/components/netatmo/snapshots/test_sensor.ambr b/tests/components/netatmo/snapshots/test_sensor.ambr index b149e80fa5b..00285f565a6 100644 --- a/tests/components/netatmo/snapshots/test_sensor.ambr +++ b/tests/components/netatmo/snapshots/test_sensor.ambr @@ -1501,7 +1501,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -1520,7 +1520,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Gust angle', 'platform': 'netatmo', @@ -1535,10 +1535,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Home avg Gust angle', 'latitude': 32.17901225, 'longitude': -117.17901225, - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -1659,60 +1660,6 @@ 'state': '63.2', }) # --- -# name: test_entity[sensor.home_avg_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_avg_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, 
- 'unique_id': 'Home-avg-windangle_value', - 'unit_of_measurement': '°', - }) -# --- -# name: test_entity[sensor.home_avg_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'friendly_name': 'Home avg None', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': '°', - }), - 'context': , - 'entity_id': 'sensor.home_avg_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '17.0', - }) -# --- # name: test_entity[sensor.home_avg_precipitation-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1939,6 +1886,61 @@ 'state': '22.7', }) # --- +# name: test_entity[sensor.home_avg_wind_direction-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_avg_wind_direction', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind direction', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-avg-windangle_value', + 'unit_of_measurement': '°', + }) +# --- +# name: test_entity[sensor.home_avg_wind_direction-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', + 'friendly_name': 'Home avg Wind direction', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.home_avg_wind_direction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17.0', + }) +# --- # name: test_entity[sensor.home_avg_wind_speed-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2061,7 +2063,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -2080,7 +2082,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Gust angle', 'platform': 'netatmo', @@ -2095,10 +2097,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Home max Gust angle', 'latitude': 32.17901225, 'longitude': -117.17901225, - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -2219,60 +2222,6 @@ 'state': '76', }) # --- -# name: test_entity[sensor.home_max_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_max_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 
'Home-max-windangle_value', - 'unit_of_measurement': '°', - }) -# --- -# name: test_entity[sensor.home_max_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'friendly_name': 'Home max None', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': '°', - }), - 'context': , - 'entity_id': 'sensor.home_max_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '17', - }) -# --- # name: test_entity[sensor.home_max_precipitation-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2499,6 +2448,61 @@ 'state': '27.4', }) # --- +# name: test_entity[sensor.home_max_wind_direction-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_max_wind_direction', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind direction', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-max-windangle_value', + 'unit_of_measurement': '°', + }) +# --- +# name: test_entity[sensor.home_max_wind_direction-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', + 'friendly_name': 'Home max Wind direction', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.home_max_wind_direction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17', + }) +# --- # name: test_entity[sensor.home_max_wind_speed-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2621,7 +2625,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -2640,7 +2644,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Gust angle', 'platform': 'netatmo', @@ -2655,10 +2659,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Home min Gust angle', 'latitude': 32.17901225, 'longitude': -117.17901225, - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -2779,60 +2784,6 @@ 'state': '56', }) # --- -# name: test_entity[sensor.home_min_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_min_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'Home-min-windangle_value', - 
'unit_of_measurement': '°', - }) -# --- -# name: test_entity[sensor.home_min_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'friendly_name': 'Home min None', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': '°', - }), - 'context': , - 'entity_id': 'sensor.home_min_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '17', - }) -# --- # name: test_entity[sensor.home_min_precipitation-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3059,6 +3010,61 @@ 'state': '19.8', }) # --- +# name: test_entity[sensor.home_min_wind_direction-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_wind_direction', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind direction', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-windangle_value', + 'unit_of_measurement': '°', + }) +# --- +# name: test_entity[sensor.home_min_wind_direction-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', + 'friendly_name': 'Home min Wind direction', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.home_min_wind_direction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17', + }) +# --- # name: test_entity[sensor.home_min_wind_speed-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -6253,7 +6259,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -6272,7 +6278,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Gust angle', 'platform': 'netatmo', @@ -6287,8 +6293,9 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Villa Garden Gust angle', - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -6524,7 +6531,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -6543,7 +6550,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Wind angle', 'platform': 'netatmo', @@ -6558,8 +6565,9 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Villa Garden Wind angle', - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , diff --git a/tests/components/nut/test_device_action.py b/tests/components/nut/test_device_action.py index 01675f928e3..ea6b7306a5f 100644 --- a/tests/components/nut/test_device_action.py +++ b/tests/components/nut/test_device_action.py @@ -15,6 +15,7 @@ 
from homeassistant.components.nut import DOMAIN from homeassistant.components.nut.const import INTEGRATION_SUPPORTED_COMMANDS from homeassistant.const import CONF_DEVICE_ID, CONF_TYPE from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -191,48 +192,39 @@ async def test_action(hass: HomeAssistant, device_registry: dr.DeviceRegistry) - run_command.assert_called_with("someUps", "beeper.disable") -async def test_rund_command_exception( +async def test_run_command_exception( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - caplog: pytest.LogCaptureFixture, ) -> None: - """Test logged error if run command raises exception.""" + """Test if run command raises exception with translation.""" - list_commands_return_value = {"beeper.enable": None} - error_message = "Something wrong happened" - run_command = AsyncMock(side_effect=NUTError(error_message)) + command_name = "beeper.enable" + nut_error_message = "Something wrong happened" + run_command = AsyncMock(side_effect=NUTError(nut_error_message)) await async_init_integration( hass, list_vars={"ups.status": "OL"}, - list_commands_return_value=list_commands_return_value, + list_ups={"ups1": "UPS 1"}, + list_commands_return_value={command_name: None}, run_command=run_command, ) device_entry = next(device for device in device_registry.devices.values()) - assert await async_setup_component( - hass, - automation.DOMAIN, - { - automation.DOMAIN: [ - { - "trigger": { - "platform": "event", - "event_type": "test_some_event", - }, - "action": { - "domain": DOMAIN, - "device_id": device_entry.id, - "type": "beeper_enable", - }, - }, - ] - }, + platform = await device_automation.async_get_device_automation_platform( + hass, DOMAIN, DeviceAutomationType.ACTION ) - hass.bus.async_fire("test_some_event") - await hass.async_block_till_done() - - assert error_message in caplog.text + error_message = f"Error running command {command_name}, {nut_error_message}" + with pytest.raises(HomeAssistantError, match=error_message): + await platform.async_call_action_from_config( + hass, + { + CONF_TYPE: command_name, + CONF_DEVICE_ID: device_entry.id, + }, + {}, + None, + ) async def test_action_exception_invalid_device(hass: HomeAssistant) -> None: @@ -248,10 +240,12 @@ async def test_action_exception_invalid_device(hass: HomeAssistant) -> None: hass, DOMAIN, DeviceAutomationType.ACTION ) - with pytest.raises(InvalidDeviceAutomationConfig): + device_id = "invalid_device_id" + error_message = f"Unable to find a NUT device with ID {device_id}" + with pytest.raises(InvalidDeviceAutomationConfig, match=error_message): await platform.async_call_action_from_config( hass, - {CONF_TYPE: "beeper.enable", CONF_DEVICE_ID: "invalid_device_id"}, + {CONF_TYPE: "beeper.enable", CONF_DEVICE_ID: device_id}, {}, None, ) diff --git a/tests/components/nut/test_init.py b/tests/components/nut/test_init.py index 0585696cef2..4f11ffb5bb0 100644 --- a/tests/components/nut/test_init.py +++ b/tests/components/nut/test_init.py @@ -4,6 +4,7 @@ from copy import deepcopy from unittest.mock import patch from aionut import NUTError, NUTLoginError +import pytest from homeassistant.components.nut.const import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -56,7 +57,10 @@ async def test_async_setup_entry(hass: HomeAssistant) -> None: assert not hass.data.get(DOMAIN) -async def test_config_not_ready(hass: HomeAssistant) 
-> None: +async def test_config_not_ready( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: """Test for setup failure if connection to broker is missing.""" entry = MockConfigEntry( domain=DOMAIN, @@ -64,6 +68,8 @@ async def test_config_not_ready(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) + nut_error_message = "Something wrong happened" + error_message = f"Error fetching UPS state: {nut_error_message}" with ( patch( "homeassistant.components.nut.AIONUTClient.list_ups", @@ -71,15 +77,20 @@ async def test_config_not_ready(hass: HomeAssistant) -> None: ), patch( "homeassistant.components.nut.AIONUTClient.list_vars", - side_effect=NUTError, + side_effect=NUTError(nut_error_message), ), ): await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert entry.state is ConfigEntryState.SETUP_RETRY + assert error_message in caplog.text -async def test_auth_fails(hass: HomeAssistant) -> None: + +async def test_auth_fails( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: """Test for setup failure if auth has changed.""" entry = MockConfigEntry( domain=DOMAIN, @@ -87,6 +98,8 @@ async def test_auth_fails(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) + nut_error_message = "Something wrong happened" + error_message = f"Device authentication error: {nut_error_message}" with ( patch( "homeassistant.components.nut.AIONUTClient.list_ups", @@ -94,13 +107,15 @@ async def test_auth_fails(hass: HomeAssistant) -> None: ), patch( "homeassistant.components.nut.AIONUTClient.list_vars", - side_effect=NUTLoginError, + side_effect=NUTLoginError(nut_error_message), ), ): await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert entry.state is ConfigEntryState.SETUP_ERROR + assert error_message in caplog.text + flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 assert flows[0]["context"]["source"] == "reauth" diff --git a/tests/components/nut/test_sensor.py b/tests/components/nut/test_sensor.py index cdec6c5083b..89f06c934f8 100644 --- a/tests/components/nut/test_sensor.py +++ b/tests/components/nut/test_sensor.py @@ -7,16 +7,20 @@ import pytest from homeassistant.components.nut.const import DOMAIN from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass from homeassistant.const import ( + ATTR_DEVICE_CLASS, + ATTR_FRIENDLY_NAME, + ATTR_UNIT_OF_MEASUREMENT, CONF_HOST, CONF_PORT, CONF_RESOURCES, PERCENTAGE, STATE_UNKNOWN, + Platform, UnitOfElectricCurrent, UnitOfElectricPotential, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, translation from .util import ( _get_mock_nutclient, @@ -53,9 +57,9 @@ async def test_ups_devices( assert state.state == "100" expected_attributes = { - "device_class": "battery", - "friendly_name": "Ups1 Battery charge", - "unit_of_measurement": PERCENTAGE, + ATTR_DEVICE_CLASS: "battery", + ATTR_FRIENDLY_NAME: "Ups1 Battery charge", + ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE, } # Only test for a subset of attributes in case # HA changes the implementation and a new one appears @@ -88,9 +92,9 @@ async def test_ups_devices_with_unique_ids( assert state.state == "100" expected_attributes = { - "device_class": "battery", - "friendly_name": "Ups1 Battery charge", - "unit_of_measurement": PERCENTAGE, + ATTR_DEVICE_CLASS: "battery", + ATTR_FRIENDLY_NAME: "Ups1 Battery charge", + ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE, 
} # Only test for a subset of attributes in case # HA changes the implementation and a new one appears @@ -126,10 +130,10 @@ async def test_pdu_devices_with_unique_ids( device_id="sensor.ups1_input_voltage", state_value="122.91", expected_attributes={ - "device_class": SensorDeviceClass.VOLTAGE, + ATTR_DEVICE_CLASS: SensorDeviceClass.VOLTAGE, "state_class": SensorStateClass.MEASUREMENT, - "friendly_name": "Ups1 Input voltage", - "unit_of_measurement": UnitOfElectricPotential.VOLT, + ATTR_FRIENDLY_NAME: "Ups1 Input voltage", + ATTR_UNIT_OF_MEASUREMENT: UnitOfElectricPotential.VOLT, }, ) @@ -141,8 +145,8 @@ async def test_pdu_devices_with_unique_ids( device_id="sensor.ups1_ambient_humidity_status", state_value="good", expected_attributes={ - "device_class": SensorDeviceClass.ENUM, - "friendly_name": "Ups1 Ambient humidity status", + ATTR_DEVICE_CLASS: SensorDeviceClass.ENUM, + ATTR_FRIENDLY_NAME: "Ups1 Ambient humidity status", }, ) @@ -154,8 +158,8 @@ async def test_pdu_devices_with_unique_ids( device_id="sensor.ups1_ambient_temperature_status", state_value="good", expected_attributes={ - "device_class": SensorDeviceClass.ENUM, - "friendly_name": "Ups1 Ambient temperature status", + ATTR_DEVICE_CLASS: SensorDeviceClass.ENUM, + ATTR_FRIENDLY_NAME: "Ups1 Ambient temperature status", }, ) @@ -246,6 +250,36 @@ async def test_stale_options( assert state.state == "10" +async def test_state_ambient_translation(hass: HomeAssistant) -> None: + """Test translation of ambient state sensor.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_HOST: "mock", CONF_PORT: "mock"}, + ) + entry.add_to_hass(hass) + + mock_pynut = _get_mock_nutclient( + list_ups={"ups1": "UPS 1"}, list_vars={"ambient.humidity.status": "good"} + ) + + with patch( + "homeassistant.components.nut.AIONUTClient", + return_value=mock_pynut, + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + key = "ambient_humidity_status" + state = hass.states.get(f"sensor.ups1_{key}") + assert state.state == "good" + + result = translation.async_translate_state( + hass, state.state, Platform.SENSOR, DOMAIN, key, None + ) + + assert result == "Good" + + @pytest.mark.parametrize( ("model", "unique_id_base"), [ @@ -305,9 +339,9 @@ async def test_pdu_dynamic_outlets( device_id="sensor.ups1_outlet_a1_current", state_value="0", expected_attributes={ - "device_class": SensorDeviceClass.CURRENT, - "friendly_name": "Ups1 Outlet A1 current", - "unit_of_measurement": UnitOfElectricCurrent.AMPERE, + ATTR_DEVICE_CLASS: SensorDeviceClass.CURRENT, + ATTR_FRIENDLY_NAME: "Ups1 Outlet A1 current", + ATTR_UNIT_OF_MEASUREMENT: UnitOfElectricCurrent.AMPERE, }, ) @@ -319,9 +353,9 @@ async def test_pdu_dynamic_outlets( device_id="sensor.ups1_outlet_a24_current", state_value="0.19", expected_attributes={ - "device_class": SensorDeviceClass.CURRENT, - "friendly_name": "Ups1 Outlet A24 current", - "unit_of_measurement": UnitOfElectricCurrent.AMPERE, + ATTR_DEVICE_CLASS: SensorDeviceClass.CURRENT, + ATTR_FRIENDLY_NAME: "Ups1 Outlet A24 current", + ATTR_UNIT_OF_MEASUREMENT: UnitOfElectricCurrent.AMPERE, }, ) diff --git a/tests/components/ohme/test_sensor.py b/tests/components/ohme/test_sensor.py index 21f9f06f963..8fc9edddcf9 100644 --- a/tests/components/ohme/test_sensor.py +++ b/tests/components/ohme/test_sensor.py @@ -5,6 +5,7 @@ from unittest.mock import MagicMock, patch from freezegun.api import FrozenDateTimeFactory from ohme import ApiException +import pytest from syrupy import SnapshotAssertion from 
homeassistant.const import STATE_UNAVAILABLE, Platform @@ -16,6 +17,7 @@ from . import setup_integration from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/onkyo/test_config_flow.py b/tests/components/onkyo/test_config_flow.py index 28186503ead..92a4a34e8fb 100644 --- a/tests/components/onkyo/test_config_flow.py +++ b/tests/components/onkyo/test_config_flow.py @@ -1,12 +1,10 @@ """Test Onkyo config flow.""" -from typing import Any from unittest.mock import patch import pytest from homeassistant import config_entries -from homeassistant.components.onkyo import InputSource from homeassistant.components.onkyo.config_flow import OnkyoConfigFlow from homeassistant.components.onkyo.const import ( DOMAIN, @@ -536,89 +534,6 @@ async def test_reconfigure_new_device(hass: HomeAssistant) -> None: assert config_entry.unique_id == old_unique_id -@pytest.mark.parametrize( - ("user_input", "exception", "error"), - [ - ( - # No host, and thus no host reachable - { - CONF_HOST: None, - "receiver_max_volume": 100, - "max_volume": 100, - "sources": {}, - }, - None, - "cannot_connect", - ), - ( - # No host, and connection exception - { - CONF_HOST: None, - "receiver_max_volume": 100, - "max_volume": 100, - "sources": {}, - }, - Exception(), - "cannot_connect", - ), - ], -) -async def test_import_fail( - hass: HomeAssistant, - user_input: dict[str, Any], - exception: Exception, - error: str, -) -> None: - """Test import flow failed.""" - - with patch( - "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", - side_effect=exception, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=user_input - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == error - - -async def test_import_success( - hass: HomeAssistant, -) -> None: - """Test import flow succeeded.""" - info = create_receiver_info(1) - - user_input = { - CONF_HOST: info.host, - "receiver_max_volume": 80, - "max_volume": 110, - "sources": { - InputSource("00"): "Auxiliary", - InputSource("01"): "Video", - }, - "info": info, - } - - import_result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=user_input - ) - await hass.async_block_till_done() - - assert import_result["type"] is FlowResultType.CREATE_ENTRY - assert import_result["data"] == {"host": "host 1"} - assert import_result["options"] == { - "volume_resolution": 80, - "max_volume": 100, - "input_sources": { - "00": "Auxiliary", - "01": "Video", - }, - "listening_modes": {}, - } - - @pytest.mark.parametrize( "ignore_missing_translations", [ diff --git a/tests/components/openai_conversation/test_config_flow.py b/tests/components/openai_conversation/test_config_flow.py index 90a08471f39..17a5aad6478 100644 --- a/tests/components/openai_conversation/test_config_flow.py +++ b/tests/components/openai_conversation/test_config_flow.py @@ -1,9 +1,10 @@ """Test the OpenAI Conversation config flow.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch -from httpx import Response +import httpx from openai import APIConnectionError, AuthenticationError, BadRequestError +from openai.types.responses import Response, ResponseOutputMessage, ResponseOutputText import pytest 
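+# Both Response types appear below: httpx.Response for the mocked error cases and openai.types.responses.Response for the mocked model output, so httpx is imported as a module to keep the two apart.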
from homeassistant import config_entries @@ -16,6 +17,13 @@ from homeassistant.components.openai_conversation.const import ( CONF_RECOMMENDED, CONF_TEMPERATURE, CONF_TOP_P, + CONF_WEB_SEARCH, + CONF_WEB_SEARCH_CITY, + CONF_WEB_SEARCH_CONTEXT_SIZE, + CONF_WEB_SEARCH_COUNTRY, + CONF_WEB_SEARCH_REGION, + CONF_WEB_SEARCH_TIMEZONE, + CONF_WEB_SEARCH_USER_LOCATION, DOMAIN, RECOMMENDED_CHAT_MODEL, RECOMMENDED_MAX_TOKENS, @@ -117,13 +125,17 @@ async def test_options_unsupported_model( (APIConnectionError(request=None), "cannot_connect"), ( AuthenticationError( - response=Response(status_code=None, request=""), body=None, message=None + response=httpx.Response(status_code=None, request=""), + body=None, + message=None, ), "invalid_auth", ), ( BadRequestError( - response=Response(status_code=None, request=""), body=None, message=None + response=httpx.Response(status_code=None, request=""), + body=None, + message=None, ), "unknown", ), @@ -172,6 +184,9 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non CONF_TOP_P: RECOMMENDED_TOP_P, CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, CONF_REASONING_EFFORT: RECOMMENDED_REASONING_EFFORT, + CONF_WEB_SEARCH: False, + CONF_WEB_SEARCH_CONTEXT_SIZE: "medium", + CONF_WEB_SEARCH_USER_LOCATION: False, }, ), ( @@ -183,6 +198,9 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non CONF_TOP_P: RECOMMENDED_TOP_P, CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, CONF_REASONING_EFFORT: RECOMMENDED_REASONING_EFFORT, + CONF_WEB_SEARCH: False, + CONF_WEB_SEARCH_CONTEXT_SIZE: "medium", + CONF_WEB_SEARCH_USER_LOCATION: False, }, { CONF_RECOMMENDED: True, @@ -225,3 +243,105 @@ async def test_options_switching( await hass.async_block_till_done() assert options["type"] is FlowResultType.CREATE_ENTRY assert options["data"] == expected_options + + +async def test_options_web_search_user_location( + hass: HomeAssistant, mock_config_entry, mock_init_component +) -> None: + """Test fetching user location.""" + options_flow = await hass.config_entries.options.async_init( + mock_config_entry.entry_id + ) + hass.config.country = "US" + hass.config.time_zone = "America/Los_Angeles" + hass.states.async_set( + "zone.home", "0", {"latitude": 37.7749, "longitude": -122.4194} + ) + with patch( + "openai.resources.responses.AsyncResponses.create", + new_callable=AsyncMock, + ) as mock_create: + mock_create.return_value = Response( + object="response", + id="resp_A", + created_at=1700000000, + model="gpt-4o-mini", + parallel_tool_calls=True, + tool_choice="auto", + tools=[], + output=[ + ResponseOutputMessage( + type="message", + id="msg_A", + content=[ + ResponseOutputText( + type="output_text", + text='{"city": "San Francisco", "region": "California"}', + annotations=[], + ) + ], + role="assistant", + status="completed", + ) + ], + ) + + options = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_TEMPERATURE: 1.0, + CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, + CONF_TOP_P: RECOMMENDED_TOP_P, + CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + CONF_REASONING_EFFORT: RECOMMENDED_REASONING_EFFORT, + CONF_WEB_SEARCH: True, + CONF_WEB_SEARCH_CONTEXT_SIZE: "medium", + CONF_WEB_SEARCH_USER_LOCATION: True, + }, + ) + await hass.async_block_till_done() + assert ( + mock_create.call_args.kwargs["input"][0]["content"] == "Where are the following" + " coordinates located: (37.7749, -122.4194)?" 
+ ) + assert options["type"] is FlowResultType.CREATE_ENTRY + assert options["data"] == { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_TEMPERATURE: 1.0, + CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, + CONF_TOP_P: RECOMMENDED_TOP_P, + CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + CONF_REASONING_EFFORT: RECOMMENDED_REASONING_EFFORT, + CONF_WEB_SEARCH: True, + CONF_WEB_SEARCH_CONTEXT_SIZE: "medium", + CONF_WEB_SEARCH_USER_LOCATION: True, + CONF_WEB_SEARCH_CITY: "San Francisco", + CONF_WEB_SEARCH_REGION: "California", + CONF_WEB_SEARCH_COUNTRY: "US", + CONF_WEB_SEARCH_TIMEZONE: "America/Los_Angeles", + } + + +async def test_options_web_search_unsupported_model( + hass: HomeAssistant, mock_config_entry, mock_init_component +) -> None: + """Test the options form giving error about web search not being available.""" + options_flow = await hass.config_entries.options.async_init( + mock_config_entry.entry_id + ) + result = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_CHAT_MODEL: "o1-pro", + CONF_LLM_HASS_API: "assist", + CONF_WEB_SEARCH: True, + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"web_search": "web_search_not_supported"} diff --git a/tests/components/openai_conversation/test_conversation.py b/tests/components/openai_conversation/test_conversation.py index bfcacefb044..d6f09e0f30e 100644 --- a/tests/components/openai_conversation/test_conversation.py +++ b/tests/components/openai_conversation/test_conversation.py @@ -12,9 +12,14 @@ from openai.types.responses import ( ResponseContentPartAddedEvent, ResponseContentPartDoneEvent, ResponseCreatedEvent, + ResponseError, + ResponseErrorEvent, + ResponseFailedEvent, ResponseFunctionCallArgumentsDeltaEvent, ResponseFunctionCallArgumentsDoneEvent, ResponseFunctionToolCall, + ResponseFunctionWebSearch, + ResponseIncompleteEvent, ResponseInProgressEvent, ResponseOutputItemAddedEvent, ResponseOutputItemDoneEvent, @@ -25,12 +30,25 @@ from openai.types.responses import ( ResponseTextConfig, ResponseTextDeltaEvent, ResponseTextDoneEvent, + ResponseWebSearchCallCompletedEvent, + ResponseWebSearchCallInProgressEvent, + ResponseWebSearchCallSearchingEvent, ) +from openai.types.responses.response import IncompleteDetails import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components import conversation from homeassistant.components.homeassistant.exposed_entities import async_expose_entity +from homeassistant.components.openai_conversation.const import ( + CONF_WEB_SEARCH, + CONF_WEB_SEARCH_CITY, + CONF_WEB_SEARCH_CONTEXT_SIZE, + CONF_WEB_SEARCH_COUNTRY, + CONF_WEB_SEARCH_REGION, + CONF_WEB_SEARCH_TIMEZONE, + CONF_WEB_SEARCH_USER_LOCATION, +) from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import Context, HomeAssistant from homeassistant.helpers import intent @@ -83,17 +101,40 @@ def mock_create_stream() -> Generator[AsyncMock]: response=response, type="response.in_progress", ) + response.status = "completed" for value in events: if isinstance(value, ResponseOutputItemDoneEvent): response.output.append(value.item) + elif isinstance(value, IncompleteDetails): + response.status = "incomplete" + response.incomplete_details = value + break + if isinstance(value, ResponseError): + response.status = "failed" + response.error = value + break + yield value - response.status = "completed" - yield 
ResponseCompletedEvent( - response=response, - type="response.completed", - ) + if isinstance(value, ResponseErrorEvent): + return + + if response.status == "incomplete": + yield ResponseIncompleteEvent( + response=response, + type="response.incomplete", + ) + elif response.status == "failed": + yield ResponseFailedEvent( + response=response, + type="response.failed", + ) + else: + yield ResponseCompletedEvent( + response=response, + type="response.completed", + ) with patch( "openai.resources.responses.AsyncResponses.create", @@ -175,6 +216,121 @@ async def test_error_handling( assert result.response.speech["plain"]["speech"] == message, result.response.speech +@pytest.mark.parametrize( + ("reason", "message"), + [ + ( + "max_output_tokens", + "max output tokens reached", + ), + ( + "content_filter", + "content filter triggered", + ), + ( + None, + "unknown reason", + ), + ], +) +async def test_incomplete_response( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, + mock_create_stream: AsyncMock, + reason: str, + message: str, +) -> None: + """Test handling early model stop.""" + # Incomplete details received after some content is generated + mock_create_stream.return_value = [ + ( + # Start message + *create_message_item( + id="msg_A", + text=["Once upon", " a time, ", "there was "], + output_index=0, + ), + # Length limit or content filter + IncompleteDetails(reason=reason), + ) + ] + + result = await conversation.async_converse( + hass, + "Please tell me a big story", + "mock-conversation-id", + Context(), + agent_id="conversation.openai", + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert ( + result.response.speech["plain"]["speech"] + == f"OpenAI response incomplete: {message}" + ), result.response.speech + + # Incomplete details received before any content is generated + mock_create_stream.return_value = [ + ( + # Start generating response + *create_reasoning_item(id="rs_A", output_index=0), + # Length limit or content filter + IncompleteDetails(reason=reason), + ) + ] + + result = await conversation.async_converse( + hass, + "please tell me a big story", + "mock-conversation-id", + Context(), + agent_id="conversation.openai", + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert ( + result.response.speech["plain"]["speech"] + == f"OpenAI response incomplete: {message}" + ), result.response.speech + + +@pytest.mark.parametrize( + ("error", "message"), + [ + ( + ResponseError(code="rate_limit_exceeded", message="Rate limit exceeded"), + "OpenAI response failed: Rate limit exceeded", + ), + ( + ResponseErrorEvent(type="error", message="Some error"), + "OpenAI response error: Some error", + ), + ], +) +async def test_failed_response( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, + mock_create_stream: AsyncMock, + error: ResponseError | ResponseErrorEvent, + message: str, +) -> None: + """Test handling failed and error responses.""" + mock_create_stream.return_value = [(error,)] + + result = await conversation.async_converse( + hass, + "next natural number please", + "mock-conversation-id", + Context(), + agent_id="conversation.openai", + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert result.response.speech["plain"]["speech"] == message, result.response.speech + + async def test_conversation_agent( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ 
-346,6 +502,41 @@ def create_reasoning_item(id: str, output_index: int) -> list[ResponseStreamEven ] +def create_web_search_item(id: str, output_index: int) -> list[ResponseStreamEvent]: + """Create a web search call item.""" + return [ + ResponseOutputItemAddedEvent( + item=ResponseFunctionWebSearch( + id=id, status="in_progress", type="web_search_call" + ), + output_index=output_index, + type="response.output_item.added", + ), + ResponseWebSearchCallInProgressEvent( + item_id=id, + output_index=output_index, + type="response.web_search_call.in_progress", + ), + ResponseWebSearchCallSearchingEvent( + item_id=id, + output_index=output_index, + type="response.web_search_call.searching", + ), + ResponseWebSearchCallCompletedEvent( + item_id=id, + output_index=output_index, + type="response.web_search_call.completed", + ), + ResponseOutputItemDoneEvent( + item=ResponseFunctionWebSearch( + id=id, status="completed", type="web_search_call" + ), + output_index=output_index, + type="response.output_item.done", + ), + ] + + async def test_function_call( hass: HomeAssistant, mock_config_entry_with_assist: MockConfigEntry, @@ -436,7 +627,6 @@ async def test_function_call_invalid( mock_config_entry_with_assist: MockConfigEntry, mock_init_component, mock_create_stream: AsyncMock, - mock_chat_log: MockChatLog, # noqa: F811 description: str, messages: tuple[ResponseStreamEvent], ) -> None: @@ -488,3 +678,60 @@ async def test_assist_api_tools_conversion( tools = mock_create_stream.mock_calls[0][2]["tools"] assert tools + + +async def test_web_search( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + mock_create_stream, + mock_chat_log: MockChatLog, # noqa: F811 +) -> None: + """Test web_search_tool.""" + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + **mock_config_entry.options, + CONF_WEB_SEARCH: True, + CONF_WEB_SEARCH_CONTEXT_SIZE: "low", + CONF_WEB_SEARCH_USER_LOCATION: True, + CONF_WEB_SEARCH_CITY: "San Francisco", + CONF_WEB_SEARCH_COUNTRY: "US", + CONF_WEB_SEARCH_REGION: "California", + CONF_WEB_SEARCH_TIMEZONE: "America/Los_Angeles", + }, + ) + await hass.config_entries.async_reload(mock_config_entry.entry_id) + + message = "Home Assistant now supports ChatGPT Search in Assist" + mock_create_stream.return_value = [ + # Initial conversation + ( + *create_web_search_item(id="ws_A", output_index=0), + *create_message_item(id="msg_A", text=message, output_index=1), + ) + ] + + result = await conversation.async_converse( + hass, + "What's on the latest news?", + mock_chat_log.conversation_id, + Context(), + agent_id="conversation.openai", + ) + + assert mock_create_stream.mock_calls[0][2]["tools"] == [ + { + "type": "web_search_preview", + "search_context_size": "low", + "user_location": { + "type": "approximate", + "city": "San Francisco", + "region": "California", + "country": "US", + "timezone": "America/Los_Angeles", + }, + } + ] + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.speech["plain"]["speech"] == message, result.response.speech diff --git a/tests/components/openai_conversation/test_init.py b/tests/components/openai_conversation/test_init.py index 5aef68841ee..c4d5605de03 100644 --- a/tests/components/openai_conversation/test_init.py +++ b/tests/components/openai_conversation/test_init.py @@ -262,6 +262,27 @@ async def test_init_error( }, 0, ), + ( + {"prompt": "Picture of a dog", "filenames": ["/a/b/c.pdf"]}, + { + "input": [ + { + "content": [ + { + "type": 
"input_text", + "text": "Picture of a dog", + }, + { + "type": "input_file", + "file_data": "data:application/pdf;base64,BASE64IMAGE1", + "filename": "/a/b/c.pdf", + }, + ], + }, + ], + }, + 1, + ), ( {"prompt": "Picture of a dog", "filenames": ["/a/b/c.jpg"]}, { @@ -415,8 +436,8 @@ async def test_generate_content_service( [True, False], ), ( - {"prompt": "Not a picture of a dog", "filenames": ["/a/b/c.pdf"]}, - "Only images are supported by the OpenAI API,`/a/b/c.pdf` is not an image file", + {"prompt": "Not a picture of a dog", "filenames": ["/a/b/c.mov"]}, + "Only images and PDF are supported by the OpenAI API,`/a/b/c.mov` is not an image file or PDF", 1, [True], [True], diff --git a/tests/components/pglab/test_cover.py b/tests/components/pglab/test_cover.py new file mode 100644 index 00000000000..ea4c7a7213e --- /dev/null +++ b/tests/components/pglab/test_cover.py @@ -0,0 +1,210 @@ +"""The tests for the PG LAB Electronics cover.""" + +import json + +from homeassistant.components import cover +from homeassistant.components.cover import ( + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + SERVICE_STOP_COVER, +) +from homeassistant.const import ( + ATTR_ASSUMED_STATE, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, + STATE_UNKNOWN, +) +from homeassistant.core import HomeAssistant + +from tests.common import async_fire_mqtt_message +from tests.typing import MqttMockHAClient + +COVER_FEATURES = ( + cover.CoverEntityFeature.OPEN + | cover.CoverEntityFeature.CLOSE + | cover.CoverEntityFeature.STOP +) + + +async def call_service(hass: HomeAssistant, entity_id, service, **kwargs): + """Call a service.""" + await hass.services.async_call( + COVER_DOMAIN, + service, + {"entity_id": entity_id, **kwargs}, + blocking=True, + ) + + +async def test_cover_features( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab +) -> None: + """Test cover features.""" + topic = "pglab/discovery/E-Board-DD53AC85/config" + payload = { + "ip": "192.168.1.16", + "mac": "80:34:28:1B:18:5A", + "name": "test", + "hw": "1.0.7", + "fw": "1.0.0", + "type": "E-Board", + "id": "E-Board-DD53AC85", + "manufacturer": "PG LAB Electronics", + "params": {"shutters": 4, "boards": "10000000"}, + } + + async_fire_mqtt_message( + hass, + topic, + json.dumps(payload), + ) + await hass.async_block_till_done() + + assert len(hass.states.async_all("cover")) == 4 + + for i in range(4): + cover = hass.states.get(f"cover.test_shutter_{i}") + assert cover + assert cover.attributes["supported_features"] == COVER_FEATURES + + +async def test_cover_availability( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab +) -> None: + """Check if covers are properly created.""" + topic = "pglab/discovery/E-Board-DD53AC85/config" + payload = { + "ip": "192.168.1.16", + "mac": "80:34:28:1B:18:5A", + "name": "test", + "hw": "1.0.7", + "fw": "1.0.0", + "type": "E-Board", + "id": "E-Board-DD53AC85", + "manufacturer": "PG LAB Electronics", + "params": {"shutters": 6, "boards": "11000000"}, + } + + async_fire_mqtt_message( + hass, + topic, + json.dumps(payload), + ) + await hass.async_block_till_done() + + # We are creating 6 covers using two E-RELAY devices connected to E-BOARD. + # Now we are going to check if all covers are created and their state is unknown. + for i in range(5): + cover = hass.states.get(f"cover.test_shutter_{i}") + assert cover.state == STATE_UNKNOWN + assert not cover.attributes.get(ATTR_ASSUMED_STATE) + + # The cover with id 7 should not be created. 
+ cover = hass.states.get("cover.test_shutter_7") + assert not cover + + +async def test_cover_change_state_via_mqtt( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab +) -> None: + """Test state update via MQTT.""" + topic = "pglab/discovery/E-Board-DD53AC85/config" + payload = { + "ip": "192.168.1.16", + "mac": "80:34:28:1B:18:5A", + "name": "test", + "hw": "1.0.7", + "fw": "1.0.0", + "type": "E-Board", + "id": "E-Board-DD53AC85", + "manufacturer": "PG LAB Electronics", + "params": {"shutters": 2, "boards": "10000000"}, + } + + async_fire_mqtt_message( + hass, + topic, + json.dumps(payload), + ) + await hass.async_block_till_done() + + # Check initial state is unknown + cover = hass.states.get("cover.test_shutter_0") + assert cover.state == STATE_UNKNOWN + assert not cover.attributes.get(ATTR_ASSUMED_STATE) + + # Simulate the device responds sending mqtt messages and check if the cover state + # change appropriately. + + async_fire_mqtt_message(hass, "pglab/test/shutter/0/state", "OPEN") + await hass.async_block_till_done() + cover = hass.states.get("cover.test_shutter_0") + assert not cover.attributes.get(ATTR_ASSUMED_STATE) + assert cover.state == STATE_OPEN + + async_fire_mqtt_message(hass, "pglab/test/shutter/0/state", "OPENING") + await hass.async_block_till_done() + cover = hass.states.get("cover.test_shutter_0") + assert cover.state == STATE_OPENING + + async_fire_mqtt_message(hass, "pglab/test/shutter/0/state", "CLOSING") + await hass.async_block_till_done() + cover = hass.states.get("cover.test_shutter_0") + assert cover.state == STATE_CLOSING + + async_fire_mqtt_message(hass, "pglab/test/shutter/0/state", "CLOSED") + await hass.async_block_till_done() + cover = hass.states.get("cover.test_shutter_0") + assert cover.state == STATE_CLOSED + + +async def test_cover_mqtt_state_by_calling_service( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab +) -> None: + """Calling service to OPEN/CLOSE cover and check mqtt state.""" + topic = "pglab/discovery/E-Board-DD53AC85/config" + payload = { + "ip": "192.168.1.16", + "mac": "80:34:28:1B:18:5A", + "name": "test", + "hw": "1.0.7", + "fw": "1.0.0", + "type": "E-Board", + "id": "E-Board-DD53AC85", + "manufacturer": "PG LAB Electronics", + "params": {"shutters": 2, "boards": "10000000"}, + } + + async_fire_mqtt_message( + hass, + topic, + json.dumps(payload), + ) + await hass.async_block_till_done() + + cover = hass.states.get("cover.test_shutter_0") + assert cover.state == STATE_UNKNOWN + assert not cover.attributes.get(ATTR_ASSUMED_STATE) + + # Call HA covers services and verify that the MQTT messages are sent correctly + + await call_service(hass, "cover.test_shutter_0", SERVICE_OPEN_COVER) + mqtt_mock.async_publish.assert_called_once_with( + "pglab/test/shutter/0/set", "OPEN", 0, False + ) + mqtt_mock.async_publish.reset_mock() + + await call_service(hass, "cover.test_shutter_0", SERVICE_STOP_COVER) + mqtt_mock.async_publish.assert_called_once_with( + "pglab/test/shutter/0/set", "STOP", 0, False + ) + mqtt_mock.async_publish.reset_mock() + + await call_service(hass, "cover.test_shutter_0", SERVICE_CLOSE_COVER) + mqtt_mock.async_publish.assert_called_once_with( + "pglab/test/shutter/0/set", "CLOSE", 0, False + ) + mqtt_mock.async_publish.reset_mock() diff --git a/tests/components/pterodactyl/__init__.py b/tests/components/pterodactyl/__init__.py new file mode 100644 index 00000000000..a5b28d67ae3 --- /dev/null +++ b/tests/components/pterodactyl/__init__.py @@ -0,0 +1 @@ +"""Tests for the Pterodactyl 
integration.""" diff --git a/tests/components/pterodactyl/conftest.py b/tests/components/pterodactyl/conftest.py new file mode 100644 index 00000000000..62326e79207 --- /dev/null +++ b/tests/components/pterodactyl/conftest.py @@ -0,0 +1,155 @@ +"""Common fixtures for the Pterodactyl tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from pydactyl.responses import PaginatedResponse +import pytest + +from homeassistant.components.pterodactyl.const import DOMAIN +from homeassistant.const import CONF_API_KEY, CONF_URL + +from tests.common import MockConfigEntry + +TEST_URL = "https://192.168.0.1:8080/" +TEST_API_KEY = "TestClientApiKey" +TEST_USER_INPUT = { + CONF_URL: TEST_URL, + CONF_API_KEY: TEST_API_KEY, +} +TEST_SERVER_LIST_DATA = { + "meta": {"pagination": {"total": 2, "count": 2, "per_page": 50, "current_page": 1}}, + "data": [ + { + "object": "server", + "attributes": { + "server_owner": True, + "identifier": "1", + "internal_id": 1, + "uuid": "1-1-1-1-1", + "name": "Test Server 1", + "node": "default_node", + "description": "Description of Test Server 1", + "limits": { + "memory": 2048, + "swap": 1024, + "disk": 10240, + "io": 500, + "cpu": 100, + "threads": None, + "oom_disabled": True, + }, + "invocation": "java -jar test_server1.jar", + "docker_image": "test_docker_image_1", + "egg_features": ["java_version"], + }, + }, + { + "object": "server", + "attributes": { + "server_owner": True, + "identifier": "2", + "internal_id": 2, + "uuid": "2-2-2-2-2", + "name": "Test Server 2", + "node": "default_node", + "description": "Description of Test Server 2", + "limits": { + "memory": 2048, + "swap": 1024, + "disk": 10240, + "io": 500, + "cpu": 100, + "threads": None, + "oom_disabled": True, + }, + "invocation": "java -jar test_server_2.jar", + "docker_image": "test_docker_image2", + "egg_features": ["java_version"], + }, + }, + ], +} +TEST_SERVER = { + "server_owner": True, + "identifier": "1", + "internal_id": 1, + "uuid": "1-1-1-1-1", + "name": "Test Server 1", + "node": "default_node", + "is_node_under_maintenance": False, + "sftp_details": {"ip": "192.168.0.1", "port": 2022}, + "description": "", + "limits": { + "memory": 2048, + "swap": 1024, + "disk": 10240, + "io": 500, + "cpu": 100, + "threads": None, + "oom_disabled": True, + }, + "invocation": "java -jar test.jar", + "docker_image": "test_docker_image", + "egg_features": ["eula", "java_version", "pid_limit"], + "feature_limits": {"databases": 0, "allocations": 0, "backups": 3}, + "status": None, + "is_suspended": False, + "is_installing": False, + "is_transferring": False, + "relationships": {"allocations": {...}, "variables": {...}}, +} +TEST_SERVER_UTILIZATION = { + "current_state": "running", + "is_suspended": False, + "resources": { + "memory_bytes": 1111, + "cpu_absolute": 22, + "disk_bytes": 3333, + "network_rx_bytes": 44, + "network_tx_bytes": 55, + "uptime": 6666, + }, +} + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.pterodactyl.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Create Pterodactyl mock config entry.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id=None, + entry_id="01234567890123456789012345678901", + title=TEST_URL, + data={ + CONF_URL: TEST_URL, + CONF_API_KEY: TEST_API_KEY, + }, + version=1, + ) + + +@pytest.fixture +def mock_pterodactyl(): + 
"""Mock the Pterodactyl API.""" + with patch( + "homeassistant.components.pterodactyl.api.PterodactylClient", autospec=True + ) as mock: + mock.return_value.client.servers.list_servers.return_value = PaginatedResponse( + mock.return_value, "client", TEST_SERVER_LIST_DATA + ) + mock.return_value.client.servers.get_server.return_value = TEST_SERVER + mock.return_value.client.servers.get_server_utilization.return_value = ( + TEST_SERVER_UTILIZATION + ) + + yield mock.return_value diff --git a/tests/components/pterodactyl/test_config_flow.py b/tests/components/pterodactyl/test_config_flow.py new file mode 100644 index 00000000000..14bb2d2f69f --- /dev/null +++ b/tests/components/pterodactyl/test_config_flow.py @@ -0,0 +1,129 @@ +"""Test the Pterodactyl config flow.""" + +from pydactyl import PterodactylClient +from pydactyl.exceptions import ClientConfigError, PterodactylApiError +import pytest + +from homeassistant.components.pterodactyl.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import TEST_URL, TEST_USER_INPUT + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("mock_pterodactyl", "mock_setup_entry") +async def test_full_flow(hass: HomeAssistant) -> None: + """Test full flow without errors.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], + user_input=TEST_USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TEST_URL + assert result["data"] == TEST_USER_INPUT + + +@pytest.mark.usefixtures("mock_setup_entry") +@pytest.mark.parametrize( + "exception_type", + [ + ClientConfigError, + PterodactylApiError, + ], +) +async def test_recovery_after_api_error( + hass: HomeAssistant, + exception_type, + mock_pterodactyl: PterodactylClient, +) -> None: + """Test recovery after an API error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_pterodactyl.client.servers.list_servers.side_effect = exception_type + + result = await hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], + user_input=TEST_USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_pterodactyl.reset_mock(side_effect=True) + + result = await hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], user_input=TEST_USER_INPUT + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TEST_URL + assert result["data"] == TEST_USER_INPUT + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_recovery_after_unknown_error( + hass: HomeAssistant, + mock_pterodactyl: PterodactylClient, +) -> None: + """Test recovery after an API error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_pterodactyl.client.servers.list_servers.side_effect = Exception + + result = await 
hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], + user_input=TEST_USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + + mock_pterodactyl.reset_mock(side_effect=True) + + result = await hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], user_input=TEST_USER_INPUT + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TEST_URL + assert result["data"] == TEST_USER_INPUT + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_service_already_configured( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_pterodactyl: PterodactylClient, +) -> None: + """Test config flow abort if the Pterodactyl server is already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=TEST_USER_INPUT + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/pvoutput/test_sensor.py b/tests/components/pvoutput/test_sensor.py index fbcff94be60..36a37653efe 100644 --- a/tests/components/pvoutput/test_sensor.py +++ b/tests/components/pvoutput/test_sensor.py @@ -30,8 +30,8 @@ async def test_sensors( ) -> None: """Test the PVOutput sensors.""" - state = hass.states.get("sensor.frenck_s_solar_farm_energy_consumed") - entry = entity_registry.async_get("sensor.frenck_s_solar_farm_energy_consumed") + state = hass.states.get("sensor.frenck_s_solar_farm_energy_consumption") + entry = entity_registry.async_get("sensor.frenck_s_solar_farm_energy_consumption") assert entry assert state assert entry.unique_id == "12345_energy_consumption" @@ -40,14 +40,14 @@ async def test_sensors( assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY assert ( state.attributes.get(ATTR_FRIENDLY_NAME) - == "Frenck's Solar Farm Energy consumed" + == "Frenck's Solar Farm Energy consumption" ) assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL_INCREASING assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.WATT_HOUR assert ATTR_ICON not in state.attributes - state = hass.states.get("sensor.frenck_s_solar_farm_energy_generated") - entry = entity_registry.async_get("sensor.frenck_s_solar_farm_energy_generated") + state = hass.states.get("sensor.frenck_s_solar_farm_energy_generation") + entry = entity_registry.async_get("sensor.frenck_s_solar_farm_energy_generation") assert entry assert state assert entry.unique_id == "12345_energy_generation" @@ -56,7 +56,7 @@ async def test_sensors( assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY assert ( state.attributes.get(ATTR_FRIENDLY_NAME) - == "Frenck's Solar Farm Energy generated" + == "Frenck's Solar Farm Energy generation" ) assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL_INCREASING assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.WATT_HOUR @@ -78,8 +78,8 @@ async def test_sensors( assert ATTR_DEVICE_CLASS not in state.attributes assert ATTR_ICON not in state.attributes - state = hass.states.get("sensor.frenck_s_solar_farm_power_consumed") - entry = entity_registry.async_get("sensor.frenck_s_solar_farm_power_consumed") + state = hass.states.get("sensor.frenck_s_solar_farm_power_consumption") + entry = 
entity_registry.async_get("sensor.frenck_s_solar_farm_power_consumption") assert entry assert state assert entry.unique_id == "12345_power_consumption" @@ -87,14 +87,15 @@ async def test_sensors( assert state.state == "2500.0" assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.POWER assert ( - state.attributes.get(ATTR_FRIENDLY_NAME) == "Frenck's Solar Farm Power consumed" + state.attributes.get(ATTR_FRIENDLY_NAME) + == "Frenck's Solar Farm Power consumption" ) assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPower.WATT assert ATTR_ICON not in state.attributes - state = hass.states.get("sensor.frenck_s_solar_farm_power_generated") - entry = entity_registry.async_get("sensor.frenck_s_solar_farm_power_generated") + state = hass.states.get("sensor.frenck_s_solar_farm_power_generation") + entry = entity_registry.async_get("sensor.frenck_s_solar_farm_power_generation") assert entry assert state assert entry.unique_id == "12345_power_generation" @@ -103,7 +104,7 @@ async def test_sensors( assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.POWER assert ( state.attributes.get(ATTR_FRIENDLY_NAME) - == "Frenck's Solar Farm Power generated" + == "Frenck's Solar Farm Power generation" ) assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPower.WATT diff --git a/tests/components/pyload/snapshots/test_diagnostics.ambr b/tests/components/pyload/snapshots/test_diagnostics.ambr index 81a5d750bc0..d773804bf73 100644 --- a/tests/components/pyload/snapshots/test_diagnostics.ambr +++ b/tests/components/pyload/snapshots/test_diagnostics.ambr @@ -13,6 +13,7 @@ 'download': True, 'free_space': 99999999999, 'pause': False, + 'proxy': None, 'queue': 6, 'reconnect': False, 'speed': 5405963.0, diff --git a/tests/components/qbus/conftest.py b/tests/components/qbus/conftest.py index 8268d091bda..f1fd96c321b 100644 --- a/tests/components/qbus/conftest.py +++ b/tests/components/qbus/conftest.py @@ -1,5 +1,7 @@ """Test fixtures for qbus.""" +import json + import pytest from homeassistant.components.qbus.const import CONF_SERIAL_NUMBER, DOMAIN @@ -7,9 +9,13 @@ from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant from homeassistant.util.json import JsonObjectType -from .const import FIXTURE_PAYLOAD_CONFIG +from .const import FIXTURE_PAYLOAD_CONFIG, TOPIC_CONFIG -from tests.common import MockConfigEntry, load_json_object_fixture +from tests.common import ( + MockConfigEntry, + async_fire_mqtt_message, + load_json_object_fixture, +) @pytest.fixture @@ -31,3 +37,18 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: def payload_config() -> JsonObjectType: """Return the config topic payload.""" return load_json_object_fixture(FIXTURE_PAYLOAD_CONFIG, DOMAIN) + + +@pytest.fixture +async def setup_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + payload_config: JsonObjectType, +) -> None: + """Set up the integration.""" + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + async_fire_mqtt_message(hass, TOPIC_CONFIG, json.dumps(payload_config)) + await hass.async_block_till_done() diff --git a/tests/components/qbus/fixtures/payload_config.json b/tests/components/qbus/fixtures/payload_config.json index e2c7f463e4e..fc204c975ad 100644 --- 
a/tests/components/qbus/fixtures/payload_config.json +++ b/tests/components/qbus/fixtures/payload_config.json @@ -46,7 +46,7 @@ { "id": "UL15", "location": "Media room", - "locationId": 0, + "locationId": 1, "name": "MEDIA ROOM", "originalName": "MEDIA ROOM", "refId": "000001/28", @@ -65,6 +65,40 @@ "write": true } } + }, + { + "id": "UL20", + "location": "Living", + "locationId": 0, + "name": "LIVING TH", + "originalName": "LIVING TH", + "refId": "000001/120", + "type": "thermo", + "actions": {}, + "properties": { + "currRegime": { + "enumValues": ["MANUEEL", "VORST", "ECONOMY", "COMFORT", "NACHT"], + "read": true, + "type": "enumString", + "write": true + }, + "currTemp": { + "max": 35, + "min": 0, + "read": true, + "step": 0.5, + "type": "number", + "write": false + }, + "setTemp": { + "max": 35, + "min": 0, + "read": true, + "step": 0.5, + "type": "number", + "write": true + } + } } ] } diff --git a/tests/components/qbus/test_climate.py b/tests/components/qbus/test_climate.py new file mode 100644 index 00000000000..d521e310984 --- /dev/null +++ b/tests/components/qbus/test_climate.py @@ -0,0 +1,228 @@ +"""Test Qbus climate entities.""" + +from datetime import timedelta +from unittest.mock import MagicMock, call + +import pytest + +from homeassistant.components.climate import ( + ATTR_CURRENT_TEMPERATURE, + ATTR_HVAC_ACTION, + ATTR_PRESET_MODE, + DOMAIN as CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + SERVICE_SET_TEMPERATURE, + ClimateEntity, + HVACAction, + HVACMode, +) +from homeassistant.components.qbus.climate import STATE_REQUEST_DELAY +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.entity_platform import EntityPlatform +from homeassistant.util import dt as dt_util + +from tests.common import async_fire_mqtt_message, async_fire_time_changed +from tests.typing import MqttMockHAClient + +_CURRENT_TEMPERATURE = 21.5 +_SET_TEMPERATURE = 20.5 +_REGIME = "COMFORT" + +_PAYLOAD_CLIMATE_STATE_TEMP = ( + f'{{"id":"UL20","properties":{{"setTemp":{_SET_TEMPERATURE}}},"type":"event"}}' +) +_PAYLOAD_CLIMATE_STATE_TEMP_FULL = f'{{"id":"UL20","properties":{{"currRegime":"MANUEEL","currTemp":{_CURRENT_TEMPERATURE},"setTemp":{_SET_TEMPERATURE}}},"type":"state"}}' + +_PAYLOAD_CLIMATE_STATE_PRESET = ( + f'{{"id":"UL20","properties":{{"currRegime":"{_REGIME}"}},"type":"event"}}' +) +_PAYLOAD_CLIMATE_STATE_PRESET_FULL = f'{{"id":"UL20","properties":{{"currRegime":"{_REGIME}","currTemp":{_CURRENT_TEMPERATURE},"setTemp":22.0}},"type":"state"}}' + +_PAYLOAD_CLIMATE_SET_TEMP = f'{{"id": "UL20", "type": "state", "properties": {{"setTemp": {_SET_TEMPERATURE}}}}}' +_PAYLOAD_CLIMATE_SET_PRESET = ( + '{"id": "UL20", "type": "state", "properties": {"currRegime": "COMFORT"}}' +) + +_TOPIC_CLIMATE_STATE = "cloudapp/QBUSMQTTGW/UL1/UL20/state" +_TOPIC_CLIMATE_SET_STATE = "cloudapp/QBUSMQTTGW/UL1/UL20/setState" +_TOPIC_GET_STATE = "cloudapp/QBUSMQTTGW/getState" + +_CLIMATE_ENTITY_ID = "climate.living_th" + + +async def test_climate( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + setup_integration: None, +) -> None: + """Test climate temperature & preset.""" + + # Set temperature + mqtt_mock.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: _CLIMATE_ENTITY_ID, + ATTR_TEMPERATURE: _SET_TEMPERATURE, + }, + blocking=True, + ) + + mqtt_mock.async_publish.assert_called_once_with( + 
_TOPIC_CLIMATE_SET_STATE, _PAYLOAD_CLIMATE_SET_TEMP, 0, False + ) + + # Simulate a partial state response + async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_TEMP) + await hass.async_block_till_done() + + # Check state + entity = hass.states.get(_CLIMATE_ENTITY_ID) + assert entity + assert entity.attributes[ATTR_TEMPERATURE] == _SET_TEMPERATURE + assert entity.attributes[ATTR_CURRENT_TEMPERATURE] is None + assert entity.attributes[ATTR_PRESET_MODE] == "MANUEEL" + assert entity.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert entity.state == HVACMode.HEAT + + # After a delay, a full state request should've been sent + _wait_and_assert_state_request(hass, mqtt_mock) + + # Simulate a full state response + async_fire_mqtt_message( + hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_TEMP_FULL + ) + await hass.async_block_till_done() + + # Check state after full state response + entity = hass.states.get(_CLIMATE_ENTITY_ID) + assert entity + assert entity.attributes[ATTR_TEMPERATURE] == _SET_TEMPERATURE + assert entity.attributes[ATTR_CURRENT_TEMPERATURE] == _CURRENT_TEMPERATURE + assert entity.attributes[ATTR_PRESET_MODE] == "MANUEEL" + assert entity.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert entity.state == HVACMode.HEAT + + # Set preset + mqtt_mock.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + { + ATTR_ENTITY_ID: _CLIMATE_ENTITY_ID, + ATTR_PRESET_MODE: _REGIME, + }, + blocking=True, + ) + + mqtt_mock.async_publish.assert_called_once_with( + _TOPIC_CLIMATE_SET_STATE, _PAYLOAD_CLIMATE_SET_PRESET, 0, False + ) + + # Simulate a partial state response + async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_PRESET) + await hass.async_block_till_done() + + # Check state + entity = hass.states.get(_CLIMATE_ENTITY_ID) + assert entity + assert entity.attributes[ATTR_TEMPERATURE] == _SET_TEMPERATURE + assert entity.attributes[ATTR_CURRENT_TEMPERATURE] == _CURRENT_TEMPERATURE + assert entity.attributes[ATTR_PRESET_MODE] == _REGIME + assert entity.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert entity.state == HVACMode.HEAT + + # After a delay, a full state request should've been sent + _wait_and_assert_state_request(hass, mqtt_mock) + + # Simulate a full state response + async_fire_mqtt_message( + hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_PRESET_FULL + ) + await hass.async_block_till_done() + + # Check state after full state response + entity = hass.states.get(_CLIMATE_ENTITY_ID) + assert entity + assert entity.attributes[ATTR_TEMPERATURE] == 22.0 + assert entity.attributes[ATTR_CURRENT_TEMPERATURE] == _CURRENT_TEMPERATURE + assert entity.attributes[ATTR_PRESET_MODE] == _REGIME + assert entity.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING + assert entity.state == HVACMode.HEAT + + +async def test_climate_when_invalid_state_received( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + setup_integration: None, +) -> None: + """Test climate when no valid state is received.""" + + platform: EntityPlatform = hass.data["entity_components"][CLIMATE_DOMAIN] + entity: ClimateEntity = next( + ( + entity + for entity in platform.entities + if entity.entity_id == _CLIMATE_ENTITY_ID + ), + None, + ) + + assert entity + entity.async_schedule_update_ha_state = MagicMock() + + # Simulate state response + async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, "") + await hass.async_block_till_done() + + entity.async_schedule_update_ha_state.assert_not_called() + + +async def 
test_climate_with_fast_subsequent_changes( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + setup_integration: None, +) -> None: + """Test climate with fast subsequent changes.""" + + # Simulate two subsequent partial state responses + async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_TEMP) + await hass.async_block_till_done() + async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_TEMP) + await hass.async_block_till_done() + + # State request should be requested only once + _wait_and_assert_state_request(hass, mqtt_mock) + + +async def test_climate_with_unknown_preset( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + setup_integration: None, +) -> None: + """Test climate with passing an unknown preset value.""" + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + { + ATTR_ENTITY_ID: _CLIMATE_ENTITY_ID, + ATTR_PRESET_MODE: "What is cooler than being cool?", + }, + blocking=True, + ) + + +def _wait_and_assert_state_request( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient +) -> None: + mqtt_mock.reset_mock() + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(STATE_REQUEST_DELAY)) + mqtt_mock.async_publish.assert_has_calls( + [call(_TOPIC_GET_STATE, '["UL20"]', 0, False)], + any_order=True, + ) diff --git a/tests/components/qbus/test_light.py b/tests/components/qbus/test_light.py index c64219f1269..2db2c622289 100644 --- a/tests/components/qbus/test_light.py +++ b/tests/components/qbus/test_light.py @@ -1,7 +1,5 @@ """Test Qbus light entities.""" -import json - from homeassistant.components.light import ( ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN, @@ -10,11 +8,8 @@ from homeassistant.components.light import ( ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.util.json import JsonObjectType -from .const import TOPIC_CONFIG - -from tests.common import MockConfigEntry, async_fire_mqtt_message +from tests.common import async_fire_mqtt_message from tests.typing import MqttMockHAClient # 186 = 73% (rounded) @@ -44,17 +39,10 @@ _LIGHT_ENTITY_ID = "light.media_room" async def test_light( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - mock_config_entry: MockConfigEntry, - payload_config: JsonObjectType, + setup_integration: None, ) -> None: """Test turning on and off.""" - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - async_fire_mqtt_message(hass, TOPIC_CONFIG, json.dumps(payload_config)) - await hass.async_block_till_done() - # Switch ON mqtt_mock.reset_mock() await hass.services.async_call( diff --git a/tests/components/qbus/test_switch.py b/tests/components/qbus/test_switch.py index 83bb667e4eb..ddb63e933da 100644 --- a/tests/components/qbus/test_switch.py +++ b/tests/components/qbus/test_switch.py @@ -1,7 +1,5 @@ """Test Qbus switch entities.""" -import json - from homeassistant.components.switch import ( DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_OFF, @@ -9,11 +7,8 @@ from homeassistant.components.switch import ( ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.util.json import JsonObjectType -from .const import TOPIC_CONFIG - -from tests.common import MockConfigEntry, async_fire_mqtt_message +from tests.common import async_fire_mqtt_message from tests.typing import MqttMockHAClient _PAYLOAD_SWITCH_STATE_ON = 
'{"id":"UL10","properties":{"value":true},"type":"state"}' @@ -34,17 +29,10 @@ _SWITCH_ENTITY_ID = "switch.living" async def test_switch_turn_on_off( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - mock_config_entry: MockConfigEntry, - payload_config: JsonObjectType, + setup_integration: None, ) -> None: """Test turning on and off.""" - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - async_fire_mqtt_message(hass, TOPIC_CONFIG, json.dumps(payload_config)) - await hass.async_block_till_done() - # Switch ON mqtt_mock.reset_mock() await hass.services.async_call( diff --git a/tests/components/rainforest_eagle/test_sensor.py b/tests/components/rainforest_eagle/test_sensor.py index 31630913a70..b7e811b69ef 100644 --- a/tests/components/rainforest_eagle/test_sensor.py +++ b/tests/components/rainforest_eagle/test_sensor.py @@ -10,17 +10,17 @@ async def test_sensors_200(hass: HomeAssistant, setup_rainforest_200) -> None: """Test the sensors.""" assert len(hass.states.async_all()) == 3 - demand = hass.states.get("sensor.eagle_200_meter_power_demand") + demand = hass.states.get("sensor.eagle_200_power_demand") assert demand is not None assert demand.state == "1.152000" assert demand.attributes["unit_of_measurement"] == "kW" - delivered = hass.states.get("sensor.eagle_200_total_meter_energy_delivered") + delivered = hass.states.get("sensor.eagle_200_total_energy_delivered") assert delivered is not None assert delivered.state == "45251.285000" assert delivered.attributes["unit_of_measurement"] == "kWh" - received = hass.states.get("sensor.eagle_200_total_meter_energy_received") + received = hass.states.get("sensor.eagle_200_total_energy_received") assert received is not None assert received.state == "232.232000" assert received.attributes["unit_of_measurement"] == "kWh" @@ -33,7 +33,7 @@ async def test_sensors_200(hass: HomeAssistant, setup_rainforest_200) -> None: assert len(hass.states.async_all()) == 4 - price = hass.states.get("sensor.eagle_200_meter_price") + price = hass.states.get("sensor.eagle_200_energy_price") assert price is not None assert price.state == "0.053990" assert price.attributes["unit_of_measurement"] == "USD/kWh" @@ -43,17 +43,17 @@ async def test_sensors_100(hass: HomeAssistant, setup_rainforest_100) -> None: """Test the sensors.""" assert len(hass.states.async_all()) == 3 - demand = hass.states.get("sensor.eagle_100_meter_power_demand") + demand = hass.states.get("sensor.eagle_100_power_demand") assert demand is not None assert demand.state == "1.152000" assert demand.attributes["unit_of_measurement"] == "kW" - delivered = hass.states.get("sensor.eagle_100_total_meter_energy_delivered") + delivered = hass.states.get("sensor.eagle_100_total_energy_delivered") assert delivered is not None assert delivered.state == "45251.285000" assert delivered.attributes["unit_of_measurement"] == "kWh" - received = hass.states.get("sensor.eagle_100_total_meter_energy_received") + received = hass.states.get("sensor.eagle_100_total_energy_received") assert received is not None assert received.state == "232.232000" assert received.attributes["unit_of_measurement"] == "kWh" diff --git a/tests/components/rainforest_raven/snapshots/test_sensor.ambr b/tests/components/rainforest_raven/snapshots/test_sensor.ambr index 618766c1613..bf369d374e0 100644 --- a/tests/components/rainforest_raven/snapshots/test_sensor.ambr +++ b/tests/components/rainforest_raven/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: 
test_sensors[sensor.raven_device_meter_power_demand-entry] +# name: test_sensors[sensor.raven_device_power_demand-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -14,7 +14,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.raven_device_meter_power_demand', + 'entity_id': 'sensor.raven_device_power_demand', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -26,7 +26,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Meter power demand', + 'original_name': 'Power demand', 'platform': 'rainforest_raven', 'previous_unique_id': None, 'supported_features': 0, @@ -35,23 +35,23 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[sensor.raven_device_meter_power_demand-state] +# name: test_sensors[sensor.raven_device_power_demand-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'RAVEn Device Meter power demand', + 'friendly_name': 'RAVEn Device Power demand', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.raven_device_meter_power_demand', + 'entity_id': 'sensor.raven_device_power_demand', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1.2345', }) # --- -# name: test_sensors[sensor.raven_device_meter_price-entry] +# name: test_sensors[sensor.raven_device_energy_price-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -66,7 +66,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.raven_device_meter_price', + 'entity_id': 'sensor.raven_device_energy_price', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -78,33 +78,33 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Meter price', + 'original_name': 'Energy price', 'platform': 'rainforest_raven', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'meter_price', + 'translation_key': 'energy_price', 'unique_id': '1234567890abcdef.PriceCluster.price', 'unit_of_measurement': 'USD/kWh', }) # --- -# name: test_sensors[sensor.raven_device_meter_price-state] +# name: test_sensors[sensor.raven_device_energy_price-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'RAVEn Device Meter price', + 'friendly_name': 'RAVEn Device Energy price', 'rate_label': 'Set by user', 'state_class': , 'tier': 3, 'unit_of_measurement': 'USD/kWh', }), 'context': , - 'entity_id': 'sensor.raven_device_meter_price', + 'entity_id': 'sensor.raven_device_energy_price', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.10', }) # --- -# name: test_sensors[sensor.raven_device_meter_signal_strength-entry] +# name: test_sensors[sensor.raven_device_signal_strength-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -119,7 +119,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.raven_device_meter_signal_strength', + 'entity_id': 'sensor.raven_device_signal_strength', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -131,7 +131,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Meter signal strength', + 'original_name': 'Signal strength', 'platform': 'rainforest_raven', 'previous_unique_id': None, 'supported_features': 0, @@ -140,23 +140,23 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[sensor.raven_device_meter_signal_strength-state] +# name: test_sensors[sensor.raven_device_signal_strength-state] StateSnapshot({ 'attributes': 
ReadOnlyDict({ 'channel': 13, - 'friendly_name': 'RAVEn Device Meter signal strength', + 'friendly_name': 'RAVEn Device Signal strength', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.raven_device_meter_signal_strength', + 'entity_id': 'sensor.raven_device_signal_strength', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '100', }) # --- -# name: test_sensors[sensor.raven_device_total_meter_energy_delivered-entry] +# name: test_sensors[sensor.raven_device_total_energy_delivered-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -171,7 +171,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.raven_device_total_meter_energy_delivered', + 'entity_id': 'sensor.raven_device_total_energy_delivered', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -183,7 +183,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Total meter energy delivered', + 'original_name': 'Total energy delivered', 'platform': 'rainforest_raven', 'previous_unique_id': None, 'supported_features': 0, @@ -192,23 +192,23 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[sensor.raven_device_total_meter_energy_delivered-state] +# name: test_sensors[sensor.raven_device_total_energy_delivered-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'RAVEn Device Total meter energy delivered', + 'friendly_name': 'RAVEn Device Total energy delivered', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.raven_device_total_meter_energy_delivered', + 'entity_id': 'sensor.raven_device_total_energy_delivered', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '23456.7890', }) # --- -# name: test_sensors[sensor.raven_device_total_meter_energy_received-entry] +# name: test_sensors[sensor.raven_device_total_energy_received-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -223,7 +223,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.raven_device_total_meter_energy_received', + 'entity_id': 'sensor.raven_device_total_energy_received', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -235,7 +235,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Total meter energy received', + 'original_name': 'Total energy received', 'platform': 'rainforest_raven', 'previous_unique_id': None, 'supported_features': 0, @@ -244,16 +244,16 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[sensor.raven_device_total_meter_energy_received-state] +# name: test_sensors[sensor.raven_device_total_energy_received-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'RAVEn Device Total meter energy received', + 'friendly_name': 'RAVEn Device Total energy received', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.raven_device_total_meter_energy_received', + 'entity_id': 'sensor.raven_device_total_energy_received', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/recorder/auto_repairs/statistics/test_schema.py b/tests/components/recorder/auto_repairs/statistics/test_schema.py index 352a2345052..99d6705e4a4 100644 --- a/tests/components/recorder/auto_repairs/statistics/test_schema.py +++ b/tests/components/recorder/auto_repairs/statistics/test_schema.py @@ -87,6 +87,7 @@ async def test_validate_db_schema_fix_float_issue( 
"created_ts DOUBLE PRECISION", "start_ts DOUBLE PRECISION", "mean DOUBLE PRECISION", + "mean_weight DOUBLE PRECISION", "min DOUBLE PRECISION", "max DOUBLE PRECISION", "last_reset_ts DOUBLE PRECISION", diff --git a/tests/components/recorder/common.py b/tests/components/recorder/common.py index 28eb097f576..d381c225275 100644 --- a/tests/components/recorder/common.py +++ b/tests/components/recorder/common.py @@ -35,7 +35,8 @@ from homeassistant.components.recorder.db_schema import ( StatesMeta, ) from homeassistant.components.recorder.tasks import RecorderTask, StatisticsTask -from homeassistant.const import UnitOfTemperature +from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass +from homeassistant.const import DEGREE, UnitOfTemperature from homeassistant.core import Event, HomeAssistant, State from homeassistant.helpers import recorder as recorder_helper from homeassistant.util import dt as dt_util @@ -290,6 +291,7 @@ def record_states( sns2 = "sensor.test2" sns3 = "sensor.test3" sns4 = "sensor.test4" + sns5 = "sensor.wind_direction" sns1_attr = { "device_class": "temperature", "state_class": "measurement", @@ -302,6 +304,11 @@ def record_states( } sns3_attr = {"device_class": "temperature"} sns4_attr = {} + sns5_attr = { + "device_class": SensorDeviceClass.WIND_DIRECTION, + "state_class": SensorStateClass.MEASUREMENT_ANGLE, + "unit_of_measurement": DEGREE, + } def set_state(entity_id, state, **kwargs): """Set the state.""" @@ -315,7 +322,7 @@ def record_states( three = two + timedelta(seconds=30 * 5) four = three + timedelta(seconds=14 * 5) - states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: []} + states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: [], sns5: []} with freeze_time(one) as freezer: states[mp].append( set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)}) @@ -324,6 +331,7 @@ def record_states( states[sns2].append(set_state(sns2, "10", attributes=sns2_attr)) states[sns3].append(set_state(sns3, "10", attributes=sns3_attr)) states[sns4].append(set_state(sns4, "10", attributes=sns4_attr)) + states[sns5].append(set_state(sns5, "10", attributes=sns5_attr)) freezer.move_to(one + timedelta(microseconds=1)) states[mp].append( @@ -335,12 +343,14 @@ def record_states( states[sns2].append(set_state(sns2, "15", attributes=sns2_attr)) states[sns3].append(set_state(sns3, "15", attributes=sns3_attr)) states[sns4].append(set_state(sns4, "15", attributes=sns4_attr)) + states[sns5].append(set_state(sns5, "350", attributes=sns5_attr)) freezer.move_to(three) states[sns1].append(set_state(sns1, "20", attributes=sns1_attr)) states[sns2].append(set_state(sns2, "20", attributes=sns2_attr)) states[sns3].append(set_state(sns3, "20", attributes=sns3_attr)) states[sns4].append(set_state(sns4, "20", attributes=sns4_attr)) + states[sns5].append(set_state(sns5, "5", attributes=sns5_attr)) return zero, four, states diff --git a/tests/components/recorder/db_schema_32.py b/tests/components/recorder/db_schema_32.py index daa7fb6977c..9c19a1c7405 100644 --- a/tests/components/recorder/db_schema_32.py +++ b/tests/components/recorder/db_schema_32.py @@ -583,6 +583,8 @@ class StatisticsBase: last_reset_ts = Column(TIMESTAMP_TYPE) state = Column(DOUBLE_TYPE) sum = Column(DOUBLE_TYPE) + # *** Not originally in v32, only added for tests. 
Added in v49 + mean_weight = Column(DOUBLE_TYPE) @classmethod def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self: diff --git a/tests/components/recorder/table_managers/test_statistics_meta.py b/tests/components/recorder/table_managers/test_statistics_meta.py index 66edb84c3ef..1af60b71ed5 100644 --- a/tests/components/recorder/table_managers/test_statistics_meta.py +++ b/tests/components/recorder/table_managers/test_statistics_meta.py @@ -2,10 +2,19 @@ from __future__ import annotations +import logging +import threading + import pytest from homeassistant.components import recorder +from homeassistant.components.recorder.db_schema import StatisticsMeta +from homeassistant.components.recorder.models import ( + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.util import session_scope +from homeassistant.const import DEGREE from homeassistant.core import HomeAssistant from tests.typing import RecorderInstanceGenerator @@ -55,3 +64,78 @@ async def test_unsafe_calls_to_statistics_meta_manager( session, statistic_ids=["light.kitchen"], ) + + +async def test_invalid_mean_types( + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test passing invalid mean types will be skipped and logged.""" + instance = await async_setup_recorder_instance( + hass, {recorder.CONF_COMMIT_INTERVAL: 0} + ) + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + valid_metadata: dict[str, tuple[int, StatisticMetaData]] = { + "sensor.energy": ( + 1, + { + "mean_type": StatisticMeanType.NONE, + "has_mean": False, + "has_sum": True, + "name": "Total imported energy", + "source": "recorder", + "statistic_id": "sensor.energy", + "unit_of_measurement": "kWh", + }, + ), + "sensor.wind_direction": ( + 2, + { + "mean_type": StatisticMeanType.CIRCULAR, + "has_mean": False, + "has_sum": False, + "name": "Wind direction", + "source": "recorder", + "statistic_id": "sensor.wind_direction", + "unit_of_measurement": DEGREE, + }, + ), + "sensor.wind_speed": ( + 3, + { + "mean_type": StatisticMeanType.ARITHMETIC, + "has_mean": True, + "has_sum": False, + "name": "Wind speed", + "source": "recorder", + "statistic_id": "sensor.wind_speed", + "unit_of_measurement": "km/h", + }, + ), + } + manager = instance.statistics_meta_manager + with instance.get_session() as session: + for _, metadata in valid_metadata.values(): + session.add(StatisticsMeta.from_meta(metadata)) + + # Add invalid mean type + session.add( + StatisticsMeta( + statistic_id="sensor.invalid", + source="recorder", + has_sum=False, + name="Invalid", + mean_type=12345, + ) + ) + session.commit() + + # Check that the invalid mean type was skipped + assert manager.get_many(session) == valid_metadata + assert ( + "homeassistant.components.recorder.table_managers.statistics_meta", + logging.WARNING, + "Invalid mean type found for statistic_id: sensor.invalid, mean_type: 12345. 
Skipping", + ) in caplog.record_tuples diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index 012e227c11a..7fd73aaf735 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -1538,6 +1538,7 @@ async def test_stats_timestamp_conversion_is_reentrant( "last_reset_ts": one_year_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": process_timestamp(one_year_ago).replace(tzinfo=None), @@ -1553,6 +1554,7 @@ async def test_stats_timestamp_conversion_is_reentrant( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1568,6 +1570,7 @@ async def test_stats_timestamp_conversion_is_reentrant( "last_reset_ts": one_month_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": process_timestamp(one_month_ago).replace(tzinfo=None), @@ -1705,6 +1708,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": one_year_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1720,6 +1724,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1735,6 +1740,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": one_month_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1758,6 +1764,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": one_year_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1773,6 +1780,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1788,6 +1796,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": one_month_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1932,6 +1941,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "last_reset_ts": one_year_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1947,6 +1957,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1962,6 +1973,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "last_reset_ts": one_month_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1985,6 +1997,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index ed883c5403e..ed754723426 100644 --- a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ 
-12,6 +12,7 @@ from homeassistant.components import recorder from homeassistant.components.recorder import Recorder, history, statistics from homeassistant.components.recorder.db_schema import StatisticsShortTerm from homeassistant.components.recorder.models import ( + StatisticMeanType, datetime_to_timestamp_or_none, process_timestamp, ) @@ -123,32 +124,38 @@ async def test_compile_hourly_statistics( stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, {"last_reset", "max", "mean", "min", "state", "sum"}, ) assert stats == {} - for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}): + for kwargs in ({}, {"statistic_ids": ["sensor.test1", "sensor.wind_direction"]}): stats = statistics_during_period(hass, zero, period="5minute", **kwargs) assert stats == {} - stats = get_last_short_term_statistics( - hass, - 0, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, - ) - assert stats == {} + for sensor in ("sensor.test1", "sensor.wind_direction"): + stats = get_last_short_term_statistics( + hass, + 0, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {} do_adhoc_statistics(hass, start=zero) do_adhoc_statistics(hass, start=four) await async_wait_recording_done(hass) - metadata = get_metadata(hass, statistic_ids={"sensor.test1", "sensor.test2"}) - assert metadata["sensor.test1"][1]["has_mean"] is True - assert metadata["sensor.test1"][1]["has_sum"] is False - assert metadata["sensor.test2"][1]["has_mean"] is True - assert metadata["sensor.test2"][1]["has_sum"] is False + metadata = get_metadata( + hass, statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"} + ) + for sensor, mean_type in ( + ("sensor.test1", StatisticMeanType.ARITHMETIC), + ("sensor.test2", StatisticMeanType.ARITHMETIC), + ("sensor.wind_direction", StatisticMeanType.CIRCULAR), + ): + assert metadata[sensor][1]["mean_type"] is mean_type + assert metadata[sensor][1]["has_sum"] is False expected_1 = { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), @@ -168,11 +175,39 @@ async def test_compile_hourly_statistics( expected_stats1 = [expected_1, expected_2] expected_stats2 = [expected_1, expected_2] + expected_stats_wind_direction1 = { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(358.6387003873801), + "min": None, + "max": None, + "last_reset": None, + } + expected_stats_wind_direction2 = { + "start": process_timestamp(four).timestamp(), + "end": process_timestamp(four + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(5), + "min": None, + "max": None, + "last_reset": None, + } + expected_stats_wind_direction = [ + expected_stats_wind_direction1, + expected_stats_wind_direction2, + ] + # Test statistics_during_period stats = statistics_during_period( - hass, zero, period="5minute", statistic_ids={"sensor.test1", "sensor.test2"} + hass, + zero, + period="5minute", + statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"}, ) - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Test statistics_during_period with a far future start and end date future = 
dt_util.as_utc(dt_util.parse_datetime("2221-11-01 00:00:00")) @@ -181,7 +216,7 @@ async def test_compile_hourly_statistics( future, end_time=future, period="5minute", - statistic_ids={"sensor.test1", "sensor.test2"}, + statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"}, ) assert stats == {} @@ -191,9 +226,13 @@ async def test_compile_hourly_statistics( zero, end_time=future, period="5minute", - statistic_ids={"sensor.test1", "sensor.test2"}, + statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"}, ) - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } stats = statistics_during_period( hass, zero, statistic_ids={"sensor.test2"}, period="5minute" @@ -206,32 +245,39 @@ async def test_compile_hourly_statistics( assert stats == {} # Test get_last_short_term_statistics and get_latest_short_term_statistics - stats = get_last_short_term_statistics( - hass, - 0, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, - ) - assert stats == {} + for sensor, expected in ( + ("sensor.test1", expected_2), + ("sensor.wind_direction", expected_stats_wind_direction2), + ): + stats = get_last_short_term_statistics( + hass, + 0, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {} - stats = get_last_short_term_statistics( - hass, - 1, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, - ) - assert stats == {"sensor.test1": [expected_2]} + stats = get_last_short_term_statistics( + hass, + 1, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {sensor: [expected]} with session_scope(hass=hass, read_only=True) as session: stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, {"last_reset", "max", "mean", "min", "state", "sum"}, ) - assert stats == {"sensor.test1": [expected_2]} + assert stats == { + "sensor.test1": [expected_2], + "sensor.wind_direction": [expected_stats_wind_direction2], + } # Now wipe the latest_short_term_statistics_ids table and test again # to make sure we can rebuild the missing data @@ -241,13 +287,15 @@ async def test_compile_hourly_statistics( stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, {"last_reset", "max", "mean", "min", "state", "sum"}, ) - assert stats == {"sensor.test1": [expected_2]} + assert stats == { + "sensor.test1": [expected_2], + "sensor.wind_direction": [expected_stats_wind_direction2], + } metadata = get_metadata(hass, statistic_ids={"sensor.test1"}) - with session_scope(hass=hass, read_only=True) as session: stats = get_latest_short_term_statistics_with_session( hass, @@ -258,23 +306,44 @@ async def test_compile_hourly_statistics( ) assert stats == {"sensor.test1": [expected_2]} - stats = get_last_short_term_statistics( - hass, - 2, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, + # Test with multiple metadata ids + metadata = get_metadata( + hass, statistic_ids={"sensor.test1", "sensor.wind_direction"} ) - assert stats == {"sensor.test1": expected_stats1[::-1]} + with session_scope(hass=hass, read_only=True) as session: + stats = get_latest_short_term_statistics_with_session( + hass, + session, + 
{"sensor.test1", "sensor.wind_direction"}, + {"last_reset", "max", "mean", "min", "state", "sum"}, + metadata=metadata, + ) + assert stats == { + "sensor.test1": [expected_2], + "sensor.wind_direction": [expected_stats_wind_direction2], + } - stats = get_last_short_term_statistics( - hass, - 3, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, - ) - assert stats == {"sensor.test1": expected_stats1[::-1]} + for sensor, expected in ( + ("sensor.test1", expected_stats1[::-1]), + ("sensor.wind_direction", expected_stats_wind_direction[::-1]), + ): + stats = get_last_short_term_statistics( + hass, + 2, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {sensor: expected} + + stats = get_last_short_term_statistics( + hass, + 3, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {sensor: expected} stats = get_last_short_term_statistics( hass, @@ -291,7 +360,7 @@ async def test_compile_hourly_statistics( stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, {"last_reset", "max", "mean", "min", "state", "sum"}, ) assert stats == {} @@ -306,7 +375,7 @@ async def test_compile_hourly_statistics( stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, {"last_reset", "max", "mean", "min", "state", "sum"}, ) assert stats == {} @@ -460,15 +529,35 @@ async def test_rename_entity( expected_stats1 = [expected_1] expected_stats2 = [expected_1] expected_stats99 = [expected_1] + expected_stats_wind_direction = [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(358.6387003873801), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } entity_registry.async_update_entity("sensor.test1", new_entity_id="sensor.test99") await async_wait_recording_done(hass) stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test99": expected_stats99, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test99": expected_stats99, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } async def test_statistics_during_period_set_back_compat( @@ -544,9 +633,25 @@ async def test_rename_entity_collision( } expected_stats1 = [expected_1] expected_stats2 = [expected_1] + expected_stats_wind_direction = [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(358.6387003873801), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Insert metadata for sensor.test99 metadata_1 = { @@ -567,7 +672,11 @@ async def 
test_rename_entity_collision( # Statistics failed to migrate due to the collision stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Verify the safeguard in the states meta manager was hit assert ( @@ -631,9 +740,25 @@ async def test_rename_entity_collision_states_meta_check_disabled( } expected_stats1 = [expected_1] expected_stats2 = [expected_1] + expected_stats_wind_direction = [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(358.6387003873801), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Insert metadata for sensor.test99 metadata_1 = { @@ -660,7 +785,11 @@ async def test_rename_entity_collision_states_meta_check_disabled( # Statistics failed to migrate due to the collision stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Verify the filter_unique_constraint_integrity_error safeguard was hit assert "Blocked attempt to insert duplicated statistic rows" in caplog.text @@ -786,6 +915,7 @@ async def test_import_statistics( { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy", @@ -800,6 +930,7 @@ async def test_import_statistics( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, @@ -876,6 +1007,7 @@ async def test_import_statistics( { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy renamed", @@ -890,6 +1022,7 @@ async def test_import_statistics( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy renamed", "source": source, diff --git a/tests/components/recorder/test_websocket_api.py b/tests/components/recorder/test_websocket_api.py index a4e35bc8753..a4e4fe45db1 100644 --- a/tests/components/recorder/test_websocket_api.py +++ b/tests/components/recorder/test_websocket_api.py @@ -1,11 +1,14 @@ """The tests for sensor recorder platform.""" +from collections.abc import Iterable import datetime from datetime import timedelta +import math from statistics import fmean import sys from unittest.mock import ANY, patch +from _pytest.python_api import ApproxBase from freezegun import freeze_time from freezegun.api import FrozenDateTimeFactory import pytest @@ -13,7 +16,14 @@ import pytest from homeassistant.components import recorder from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.db_schema import Statistics, StatisticsShortTerm +from 
homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( + DEG_TO_RAD, + RAD_TO_DEG, async_add_external_statistics, get_last_statistics, get_latest_short_term_statistics_with_session, @@ -24,6 +34,7 @@ from homeassistant.components.recorder.statistics import ( from homeassistant.components.recorder.util import session_scope from homeassistant.components.recorder.websocket_api import UNIT_SCHEMA from homeassistant.components.sensor import UNIT_CONVERTERS +from homeassistant.const import DEGREE from homeassistant.core import HomeAssistant from homeassistant.helpers import recorder as recorder_helper from homeassistant.setup import async_setup_component @@ -247,12 +258,12 @@ async def test_statistics_during_period( @pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC)) +@pytest.mark.usefixtures("recorder_mock") @pytest.mark.parametrize("offset", [0, 1, 2]) async def test_statistic_during_period( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - offset, + offset: int, ) -> None: """Test statistic_during_period.""" now = dt_util.utcnow() @@ -307,7 +318,7 @@ async def test_statistic_during_period( ) imported_metadata = { - "has_mean": False, + "has_mean": True, "has_sum": True, "name": "Total imported energy", "source": "recorder", @@ -655,7 +666,7 @@ async def test_statistic_during_period( hass, session, {"sensor.test"}, - {"last_reset", "max", "mean", "min", "state", "sum"}, + {"last_reset", "state", "sum"}, ) start = imported_stats_5min[-1]["start"].timestamp() end = start + (5 * 60) @@ -672,18 +683,376 @@ async def test_statistic_during_period( } +def _circular_mean(values: Iterable[StatisticData]) -> dict[str, float]: + sin_sum = 0 + cos_sum = 0 + for x in values: + mean = x.get("mean") + assert mean is not None + sin_sum += math.sin(mean * DEG_TO_RAD) + cos_sum += math.cos(mean * DEG_TO_RAD) + + return { + "mean": (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360, + "mean_weight": math.sqrt(sin_sum**2 + cos_sum**2), + } + + +def _circular_mean_approx(values: Iterable[StatisticData]) -> ApproxBase: + return pytest.approx(_circular_mean(values)["mean"]) + + +@pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC)) +@pytest.mark.usefixtures("recorder_mock") +@pytest.mark.parametrize("offset", [0, 1, 2]) +async def test_statistic_during_period_circular_mean( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + offset: int, +) -> None: + """Test statistic_during_period.""" + now = dt_util.utcnow() + + await async_recorder_block_till_done(hass) + client = await hass_ws_client() + + zero = now + start = zero.replace(minute=offset * 5, second=0, microsecond=0) + timedelta( + hours=-3 + ) + + imported_stats_5min: list[StatisticData] = [ + { + "start": (start + timedelta(minutes=5 * i)), + "mean": (123.456 * i) % 360, + "mean_weight": 1, + } + for i in range(39) + ] + + imported_stats = [] + slice_end = 12 - offset + imported_stats.append( + { + "start": imported_stats_5min[0]["start"].replace(minute=0), + **_circular_mean(imported_stats_5min[0:slice_end]), + } + ) + for i in range(2): + slice_start = i * 12 + (12 - offset) + slice_end = (i + 1) * 12 + (12 - offset) + assert imported_stats_5min[slice_start]["start"].minute == 0 + imported_stats.append( + { + "start": imported_stats_5min[slice_start]["start"], + **_circular_mean(imported_stats_5min[slice_start:slice_end]), + } + ) + + 
imported_metadata: StatisticMetaData = { + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": "Wind direction", + "source": "recorder", + "statistic_id": "sensor.test", + "unit_of_measurement": DEGREE, + } + + recorder.get_instance(hass).async_import_statistics( + imported_metadata, + imported_stats, + Statistics, + ) + recorder.get_instance(hass).async_import_statistics( + imported_metadata, + imported_stats_5min, + StatisticsShortTerm, + ) + await async_wait_recording_done(hass) + + metadata = get_metadata(hass, statistic_ids={"sensor.test"}) + metadata_id = metadata["sensor.test"][0] + run_cache = get_short_term_statistics_run_cache(hass) + # Verify the import of the short term statistics + # also updates the run cache + assert run_cache.get_latest_ids({metadata_id}) is not None + + # No data for this period yet + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "start_time": now.isoformat(), + "end_time": now.isoformat(), + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "max": None, + "mean": None, + "min": None, + "change": None, + } + + # This should include imported_statistics_5min[:] + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_statistics_5min[:] + start_time = ( + dt_util.parse_datetime("2022-10-21T04:00:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + end_time = ( + dt_util.parse_datetime("2022-10-21T07:15:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_statistics_5min[:] + start_time = ( + dt_util.parse_datetime("2022-10-21T04:00:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + end_time = ( + dt_util.parse_datetime("2022-10-21T08:20:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics_5min[26:] + start_time = ( + dt_util.parse_datetime("2022-10-21T06:10:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + assert imported_stats_5min[26]["start"].isoformat() == start_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "start_time": start_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert 
response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[26:]), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_statistics_5min[26:] + start_time = ( + dt_util.parse_datetime("2022-10-21T06:09:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "start_time": start_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[26:]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics_5min[:26] + end_time = ( + dt_util.parse_datetime("2022-10-21T06:10:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + assert imported_stats_5min[26]["start"].isoformat() == end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "end_time": end_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[:26]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics_5min[26:32] (less than a full hour) + start_time = ( + dt_util.parse_datetime("2022-10-21T06:10:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + assert imported_stats_5min[26]["start"].isoformat() == start_time + end_time = ( + dt_util.parse_datetime("2022-10-21T06:40:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + assert imported_stats_5min[32]["start"].isoformat() == end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[26:32]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics[2:] + imported_statistics_5min[36:] + start_time = "2022-10-21T06:00:00+00:00" + assert imported_stats_5min[24 - offset]["start"].isoformat() == start_time + assert imported_stats[2]["start"].isoformat() == start_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "start_time": start_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[24 - offset :]), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_statistics[2:] + imported_statistics_5min[36:] + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "rolling_window": { + "duration": {"hours": 1, "minutes": 25}, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[24 - offset :]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics[2:3] + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "rolling_window": { + "duration": 
{"hours": 1}, + "offset": {"minutes": -25}, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + slice_start = 24 - offset + slice_end = 36 - offset + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[slice_start:slice_end]), + "max": None, + "min": None, + "change": None, + } + + # Test we can get only selected types + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "types": ["mean"], + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min), + } + + @pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC)) async def test_statistic_during_period_hole( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test statistic_during_period when there are holes in the data.""" - stat_id = 1 - - def next_id(): - nonlocal stat_id - stat_id += 1 - return stat_id - now = dt_util.utcnow() await async_recorder_block_till_done(hass) @@ -704,7 +1073,7 @@ async def test_statistic_during_period_hole( ] imported_metadata = { - "has_mean": False, + "has_mean": True, "has_sum": True, "name": "Total imported energy", "source": "recorder", @@ -830,6 +1199,156 @@ async def test_statistic_during_period_hole( } +@pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC)) +@pytest.mark.usefixtures("recorder_mock") +async def test_statistic_during_period_hole_circular_mean( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test statistic_during_period when there are holes in the data.""" + now = dt_util.utcnow() + + await async_recorder_block_till_done(hass) + client = await hass_ws_client() + + zero = now + start = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=-18) + + imported_stats: list[StatisticData] = [ + { + "start": (start + timedelta(hours=3 * i)), + "mean": (123.456 * i) % 360, + "mean_weight": 1, + } + for i in range(6) + ] + + imported_metadata: StatisticMetaData = { + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": "Wind direction", + "source": "recorder", + "statistic_id": "sensor.test", + "unit_of_measurement": DEGREE, + } + + recorder.get_instance(hass).async_import_statistics( + imported_metadata, + imported_stats, + Statistics, + ) + await async_wait_recording_done(hass) + + # This should include imported_stats[:] + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[:]), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_stats[:] + start_time = "2022-10-20T13:00:00+00:00" + end_time = "2022-10-21T05:00:00+00:00" + assert imported_stats[0]["start"].isoformat() == start_time + assert imported_stats[-1]["start"].isoformat() < end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[:]), + "max": None, + "min": None, + 
"change": None, + } + + # This should also include imported_stats[:] + start_time = "2022-10-20T13:00:00+00:00" + end_time = "2022-10-21T08:20:00+00:00" + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[:]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_stats[1:4] + start_time = "2022-10-20T16:00:00+00:00" + end_time = "2022-10-20T23:00:00+00:00" + assert imported_stats[1]["start"].isoformat() == start_time + assert imported_stats[3]["start"].isoformat() < end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[1:4]), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_stats[1:4] + start_time = "2022-10-20T15:00:00+00:00" + end_time = "2022-10-21T00:00:00+00:00" + assert imported_stats[1]["start"].isoformat() > start_time + assert imported_stats[3]["start"].isoformat() < end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[1:4]), + "max": None, + "min": None, + "change": None, + } + + @pytest.mark.parametrize( "frozen_time", [ @@ -897,7 +1416,7 @@ async def test_statistic_during_period_partial_overlap( statId = "sensor.test_overlapping" imported_metadata = { - "has_mean": False, + "has_mean": True, "has_sum": True, "name": "Total imported energy overlapping", "source": "recorder", @@ -1766,6 +2285,7 @@ async def test_list_statistic_ids( """Test list_statistic_ids.""" now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" + mean_type = StatisticMeanType.ARITHMETIC if has_mean else StatisticMeanType.NONE has_sum = not has_mean hass.config.units = units @@ -1791,6 +2311,7 @@ async def test_list_statistic_ids( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1813,6 +2334,7 @@ async def test_list_statistic_ids( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1838,6 +2360,7 @@ async def test_list_statistic_ids( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1859,6 +2382,7 @@ async def test_list_statistic_ids( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1939,6 +2463,7 @@ async def test_list_statistic_ids_unit_change( """Test 
list_statistic_ids.""" now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" + mean_type = StatisticMeanType.ARITHMETIC if has_mean else StatisticMeanType.NONE has_sum = not has_mean await async_setup_component(hass, "sensor", {}) @@ -1966,6 +2491,7 @@ async def test_list_statistic_ids_unit_change( "statistic_id": "sensor.test", "display_unit_of_measurement": statistics_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1987,6 +2513,7 @@ async def test_list_statistic_ids_unit_change( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -2208,6 +2735,7 @@ async def test_update_statistics_metadata( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2235,6 +2763,7 @@ async def test_update_statistics_metadata( "statistic_id": "sensor.test", "display_unit_of_measurement": new_display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2324,6 +2853,7 @@ async def test_change_statistics_unit( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2375,6 +2905,7 @@ async def test_change_statistics_unit( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2428,6 +2959,7 @@ async def test_change_statistics_unit( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2455,6 +2987,7 @@ async def test_change_statistics_unit_errors( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2774,6 +3307,7 @@ async def test_get_statistics_metadata( """Test get_statistics_metadata.""" now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" + mean_type = StatisticMeanType.ARITHMETIC if has_mean else StatisticMeanType.NONE has_sum = not has_mean hass.config.units = units @@ -2843,6 +3377,7 @@ async def test_get_statistics_metadata( "statistic_id": "test:total_gas", "display_unit_of_measurement": unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": "Total imported energy", "source": "test", @@ -2874,6 +3409,7 @@ async def test_get_statistics_metadata( "statistic_id": "sensor.test", "display_unit_of_measurement": attributes["unit_of_measurement"], "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -2901,6 +3437,7 @@ async def test_get_statistics_metadata( "statistic_id": "sensor.test", "display_unit_of_measurement": attributes["unit_of_measurement"], "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -2995,6 +3532,7 @@ async def test_import_statistics( { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, 
"statistic_id": statistic_id, "name": "Total imported energy", @@ -3009,6 +3547,7 @@ async def test_import_statistics( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, @@ -3213,6 +3752,7 @@ async def test_adjust_sum_statistics_energy( { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy", @@ -3227,6 +3767,7 @@ async def test_adjust_sum_statistics_energy( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, @@ -3406,6 +3947,7 @@ async def test_adjust_sum_statistics_gas( { "display_unit_of_measurement": "m³", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy", @@ -3420,6 +3962,7 @@ async def test_adjust_sum_statistics_gas( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, @@ -3617,6 +4160,7 @@ async def test_adjust_sum_statistics_errors( { "display_unit_of_measurement": state_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy", @@ -3631,6 +4175,7 @@ async def test_adjust_sum_statistics_errors( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, diff --git a/tests/components/remote_calendar/test_config_flow.py b/tests/components/remote_calendar/test_config_flow.py index 626bc2c6e03..9eb9cb40134 100644 --- a/tests/components/remote_calendar/test_config_flow.py +++ b/tests/components/remote_calendar/test_config_flow.py @@ -45,6 +45,35 @@ async def test_form_import_ics(hass: HomeAssistant, ics_content: str) -> None: } +@respx.mock +async def test_form_import_webcal(hass: HomeAssistant, ics_content: str) -> None: + """Test we get the import form.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: "webcal://some.calendar.com/calendar.ics", + }, + ) + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == CALENDAR_NAME + assert result2["data"] == { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + } + + @pytest.mark.parametrize( ("side_effect"), [ diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index f2474d640d8..21acced3d1d 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -35,6 +35,7 @@ TEST_PASSWORD = "password" TEST_PASSWORD2 = "new_password" TEST_MAC = "aa:bb:cc:dd:ee:ff" TEST_MAC2 = "ff:ee:dd:cc:bb:aa" +TEST_MAC_CAM = "11:22:33:44:55:66" DHCP_FORMATTED_MAC = "aabbccddeeff" TEST_UID = "ABC1234567D89EFG" TEST_UID_CAM = "DEF7654321D89GHT" @@ -142,6 +143,7 @@ def reolink_connect_class() -> Generator[MagicMock]: # Disable tcp push by default for tests host_mock.baichuan.port = TEST_BC_PORT host_mock.baichuan.events_active = False + host_mock.baichuan.mac_address.return_value = TEST_MAC_CAM 
host_mock.baichuan.privacy_mode.return_value = False host_mock.baichuan.day_night_state.return_value = "day" host_mock.baichuan.subscribe_events.side_effect = ReolinkError("Test error") diff --git a/tests/components/reolink/test_number.py b/tests/components/reolink/test_number.py index c6507fa36c1..dd70376d658 100644 --- a/tests/components/reolink/test_number.py +++ b/tests/components/reolink/test_number.py @@ -67,6 +67,48 @@ async def test_number( reolink_connect.set_volume.reset_mock(side_effect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_smart_ai_number( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test number entity with smart ai sensitivity.""" + reolink_connect.baichuan.smart_ai_sensitivity.return_value = 80 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.NUMBER]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.NUMBER}.{TEST_NVR_NAME}_AI_crossline_zone1_sensitivity" + + assert hass.states.get(entity_id).state == "80" + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + reolink_connect.baichuan.set_smart_ai.assert_called_with( + 0, "crossline", 0, sensitivity=50 + ) + + reolink_connect.baichuan.set_smart_ai.side_effect = InvalidParameterError( + "Test error" + ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + + reolink_connect.baichuan.set_smart_ai.reset_mock(side_effect=True) + + async def test_host_number( hass: HomeAssistant, config_entry: MockConfigEntry, diff --git a/tests/components/reolink/test_update.py b/tests/components/reolink/test_update.py index a6cfe862963..d48362516b8 100644 --- a/tests/components/reolink/test_update.py +++ b/tests/components/reolink/test_update.py @@ -6,7 +6,7 @@ from unittest.mock import MagicMock, patch from freezegun.api import FrozenDateTimeFactory import pytest -from reolink_aio.exceptions import ReolinkError +from reolink_aio.exceptions import ApiError, ReolinkError from reolink_aio.software_version import NewSoftwareVersion from homeassistant.components.reolink.update import POLL_AFTER_INSTALL, POLL_PROGRESS @@ -144,6 +144,17 @@ async def test_update_firm( blocking=True, ) + reolink_connect.update_firmware.side_effect = ApiError( + "Test error", translation_key="firmware_rate_limit" + ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + # test _async_update_future reolink_connect.camera_sw_version.return_value = "v3.3.0.226_23031644" reolink_connect.firmware_update_available.return_value = False diff --git a/tests/components/reolink/test_util.py b/tests/components/reolink/test_util.py index f66f4682b98..ef66d471801 100644 --- a/tests/components/reolink/test_util.py +++ b/tests/components/reolink/test_util.py @@ -38,51 +38,59 @@ from tests.common import MockConfigEntry [ ( ApiError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="api_error"), + ), + ( + ApiError("Test error", translation_key="firmware_rate_limit"), + HomeAssistantError(translation_key="firmware_rate_limit"), + ), + ( + ApiError("Test error", 
translation_key="not_in_strings.json"), + HomeAssistantError(translation_key="api_error"), ), ( CredentialsInvalidError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="invalid_credentials"), ), ( InvalidContentTypeError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="invalid_content_type"), ), ( InvalidParameterError("Test error"), - ServiceValidationError, + ServiceValidationError(translation_key="invalid_parameter"), ), ( LoginError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="login_error"), ), ( NoDataError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="no_data"), ), ( NotSupportedError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="not_supported"), ), ( ReolinkConnectionError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="connection_error"), ), ( ReolinkError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="unexpected"), ), ( ReolinkTimeoutError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="timeout"), ), ( SubscriptionError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="subscription_error"), ), ( UnexpectedDataError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="unexpected_data"), ), ], ) @@ -91,7 +99,7 @@ async def test_try_function( config_entry: MockConfigEntry, reolink_connect: MagicMock, side_effect: ReolinkError, - expected: Exception, + expected: HomeAssistantError, ) -> None: """Test try_function error translations using number entity.""" reolink_connect.volume.return_value = 80 @@ -104,7 +112,7 @@ async def test_try_function( entity_id = f"{Platform.NUMBER}.{TEST_NVR_NAME}_volume" reolink_connect.set_volume.side_effect = side_effect - with pytest.raises(expected): + with pytest.raises(expected.__class__) as err: await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, @@ -112,4 +120,6 @@ async def test_try_function( blocking=True, ) + assert err.value.translation_key == expected.translation_key + reolink_connect.set_volume.reset_mock(side_effect=True) diff --git a/tests/components/roborock/conftest.py b/tests/components/roborock/conftest.py index 758b002f534..d807e35710b 100644 --- a/tests/components/roborock/conftest.py +++ b/tests/components/roborock/conftest.py @@ -3,10 +3,9 @@ from collections.abc import Generator from copy import deepcopy import pathlib -import shutil +import tempfile from typing import Any from unittest.mock import Mock, patch -import uuid import pytest from roborock import RoborockCategory, RoomMapping @@ -19,7 +18,6 @@ from homeassistant.components.roborock.const import ( CONF_USER_DATA, DOMAIN, ) -from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_USERNAME, Platform from homeassistant.core import HomeAssistant @@ -30,6 +28,7 @@ from .mock_data import ( MULTI_MAP_LIST, NETWORK_INFO, PROP, + ROBOROCK_RRUID, SCENES, USER_DATA, USER_EMAIL, @@ -190,18 +189,28 @@ def bypass_api_fixture_v1_only(bypass_api_fixture) -> None: yield +@pytest.fixture(name="config_entry_data") +def config_entry_data_fixture() -> dict[str, Any]: + """Fixture that returns the unique id for the config entry.""" + return { + CONF_USERNAME: USER_EMAIL, + CONF_USER_DATA: USER_DATA.as_dict(), + CONF_BASE_URL: BASE_URL, + } + + @pytest.fixture -def mock_roborock_entry(hass: HomeAssistant) -> MockConfigEntry: +def mock_roborock_entry( + hass: HomeAssistant, 
config_entry_data: dict[str, Any] +) -> MockConfigEntry: """Create a Roborock Entry that has not been setup.""" mock_entry = MockConfigEntry( domain=DOMAIN, title=USER_EMAIL, - data={ - CONF_USERNAME: USER_EMAIL, - CONF_USER_DATA: USER_DATA.as_dict(), - CONF_BASE_URL: BASE_URL, - }, - unique_id=USER_EMAIL, + data=config_entry_data, + unique_id=ROBOROCK_RRUID, + version=1, + minor_version=2, ) mock_entry.add_to_hass(hass) return mock_entry @@ -213,42 +222,40 @@ def mock_platforms() -> list[Platform]: return [] +@pytest.fixture(autouse=True) +async def mock_patforms_fixture( + hass: HomeAssistant, + platforms: list[Platform], +) -> Generator[None]: + """Set up the Roborock platform.""" + with patch("homeassistant.components.roborock.PLATFORMS", platforms): + yield + + @pytest.fixture async def setup_entry( hass: HomeAssistant, bypass_api_fixture, mock_roborock_entry: MockConfigEntry, - cleanup_map_storage: pathlib.Path, - platforms: list[Platform], ) -> Generator[MockConfigEntry]: """Set up the Roborock platform.""" - with patch("homeassistant.components.roborock.PLATFORMS", platforms): - await hass.config_entries.async_setup(mock_roborock_entry.entry_id) - await hass.async_block_till_done() - yield mock_roborock_entry + await hass.config_entries.async_setup(mock_roborock_entry.entry_id) + await hass.async_block_till_done() + return mock_roborock_entry -@pytest.fixture(autouse=True) -async def cleanup_map_storage(cleanup_map_storage_manual) -> Generator[pathlib.Path]: - """Test cleanup, remove any map storage persisted during the test.""" - return cleanup_map_storage_manual - - -@pytest.fixture -async def cleanup_map_storage_manual( - hass: HomeAssistant, mock_roborock_entry: MockConfigEntry +@pytest.fixture(autouse=True, name="storage_path") +async def storage_path_fixture( + hass: HomeAssistant, ) -> Generator[pathlib.Path]: """Test cleanup, remove any map storage persisted during the test.""" - tmp_path = str(uuid.uuid4()) - with patch( - "homeassistant.components.roborock.roborock_storage.STORAGE_PATH", new=tmp_path - ): - storage_path = ( - pathlib.Path(hass.config.path(tmp_path)) / mock_roborock_entry.entry_id - ) - yield storage_path - # We need to first unload the config entry because unloading it will - # persist any unsaved maps to storage. 
- if mock_roborock_entry.state is ConfigEntryState.LOADED: - await hass.config_entries.async_unload(mock_roborock_entry.entry_id) - shutil.rmtree(str(storage_path), ignore_errors=True) + with tempfile.TemporaryDirectory() as tmp_path: + + def get_storage_path(_: HomeAssistant, entry_id: str) -> pathlib.Path: + return pathlib.Path(tmp_path) / entry_id + + with patch( + "homeassistant.components.roborock.roborock_storage._storage_path_prefix", + new=get_storage_path, + ): + yield pathlib.Path(tmp_path) diff --git a/tests/components/roborock/mock_data.py b/tests/components/roborock/mock_data.py index 82b51e67f8d..cf4f167ef7f 100644 --- a/tests/components/roborock/mock_data.py +++ b/tests/components/roborock/mock_data.py @@ -28,6 +28,7 @@ USER_EMAIL = "user@domain.com" BASE_URL = "https://usiot.roborock.com" +ROBOROCK_RRUID = "roboborock-userid-abc-123" USER_DATA = UserData.from_dict( { "tuyaname": "abc123", @@ -35,7 +36,7 @@ USER_DATA = UserData.from_dict( "uid": 123456, "tokentype": "", "token": "abc123", - "rruid": "abc123", + "rruid": ROBOROCK_RRUID, "region": "us", "countrycode": "1", "country": "US", diff --git a/tests/components/roborock/test_config_flow.py b/tests/components/roborock/test_config_flow.py index abd19660fba..7958f17a696 100644 --- a/tests/components/roborock/test_config_flow.py +++ b/tests/components/roborock/test_config_flow.py @@ -16,12 +16,12 @@ from vacuum_map_parser_base.config.drawable import Drawable from homeassistant import config_entries from homeassistant.components.roborock.const import CONF_ENTRY_CODE, DOMAIN, DRAWABLES -from homeassistant.const import CONF_USERNAME +from homeassistant.const import CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo -from .mock_data import MOCK_CONFIG, NETWORK_INFO, USER_DATA, USER_EMAIL +from .mock_data import MOCK_CONFIG, NETWORK_INFO, ROBOROCK_RRUID, USER_DATA, USER_EMAIL from tests.common import MockConfigEntry @@ -64,6 +64,7 @@ async def test_config_flow_success( ) assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["context"]["unique_id"] == ROBOROCK_RRUID assert result["title"] == USER_EMAIL assert result["data"] == MOCK_CONFIG assert result["result"] @@ -128,6 +129,7 @@ async def test_config_flow_failures_request_code( ) assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["context"]["unique_id"] == ROBOROCK_RRUID assert result["title"] == USER_EMAIL assert result["data"] == MOCK_CONFIG assert result["result"] @@ -189,6 +191,7 @@ async def test_config_flow_failures_code_login( ) assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["context"]["unique_id"] == ROBOROCK_RRUID assert result["title"] == USER_EMAIL assert result["data"] == MOCK_CONFIG assert result["result"] @@ -256,6 +259,7 @@ async def test_reauth_flow( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" + assert mock_roborock_entry.unique_id == ROBOROCK_RRUID assert mock_roborock_entry.data["user_data"]["rriot"]["s"] == "new_password_hash" @@ -264,7 +268,8 @@ async def test_account_already_configured( bypass_api_fixture, mock_roborock_entry: MockConfigEntry, ) -> None: - """Handle the config flow and make sure it succeeds.""" + """Ensure the same account cannot be setup twice.""" + assert mock_roborock_entry.unique_id == ROBOROCK_RRUID with patch( "homeassistant.components.roborock.async_setup_entry", return_value=True ): @@ 
-280,10 +285,59 @@ async def test_account_already_configured( result["flow_id"], {CONF_USERNAME: USER_EMAIL} ) + assert result["step_id"] == "code" + assert result["type"] is FlowResultType.FORM + with patch( + "homeassistant.components.roborock.config_flow.RoborockApiClient.code_login", + return_value=USER_DATA, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_ENTRY_CODE: "123456"} + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured_account" +async def test_reauth_wrong_account( + hass: HomeAssistant, + bypass_api_fixture, + mock_roborock_entry: MockConfigEntry, +) -> None: + """Ensure that reauthentication must use the same account.""" + + # Start reauth + result = mock_roborock_entry.async_start_reauth(hass) + await hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "reauth_confirm" + + with patch( + "homeassistant.components.roborock.async_setup_entry", return_value=True + ): + with patch( + "homeassistant.components.roborock.config_flow.RoborockApiClient.request_code" + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_USERNAME: USER_EMAIL} + ) + + assert result["step_id"] == "code" + assert result["type"] is FlowResultType.FORM + new_user_data = deepcopy(USER_DATA) + new_user_data.rruid = "new_rruid" + with patch( + "homeassistant.components.roborock.config_flow.RoborockApiClient.code_login", + return_value=new_user_data, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_ENTRY_CODE: "123456"} + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_account" + + async def test_discovery_not_setup( hass: HomeAssistant, bypass_api_fixture, @@ -322,16 +376,17 @@ async def test_discovery_not_setup( ) assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["context"]["unique_id"] == ROBOROCK_RRUID assert result["title"] == USER_EMAIL assert result["data"] == MOCK_CONFIG assert result["result"] +@pytest.mark.parametrize("platforms", [[Platform.SENSOR]]) async def test_discovery_already_setup( hass: HomeAssistant, bypass_api_fixture, mock_roborock_entry: MockConfigEntry, - cleanup_map_storage_manual, ) -> None: """Handle aborting if the device is already setup.""" await hass.config_entries.async_setup(mock_roborock_entry.entry_id) @@ -347,3 +402,4 @@ async def test_discovery_already_setup( ) assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/roborock/test_coordinator.py b/tests/components/roborock/test_coordinator.py index 94976ba92f5..dec4e0a62d4 100644 --- a/tests/components/roborock/test_coordinator.py +++ b/tests/components/roborock/test_coordinator.py @@ -13,6 +13,7 @@ from homeassistant.components.roborock.const import ( V1_LOCAL_IN_CLEANING_INTERVAL, V1_LOCAL_NOT_CLEANING_INTERVAL, ) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util @@ -21,6 +22,12 @@ from .mock_data import PROP from tests.common import MockConfigEntry, async_fire_time_changed +@pytest.fixture +def platforms() -> list[Platform]: + """Fixture to set platforms used in the test.""" + return [Platform.SENSOR] + + @pytest.mark.parametrize( ("interval", "in_cleaning"), [ diff --git a/tests/components/roborock/test_init.py 
b/tests/components/roborock/test_init.py index 3d288b6479b..a1bcfc462e4 100644 --- a/tests/components/roborock/test_init.py +++ b/tests/components/roborock/test_init.py @@ -3,6 +3,7 @@ from copy import deepcopy from http import HTTPStatus import pathlib +from typing import Any from unittest.mock import patch import pytest @@ -20,7 +21,13 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.setup import async_setup_component -from .mock_data import HOME_DATA, NETWORK_INFO, NETWORK_INFO_2 +from .mock_data import ( + HOME_DATA, + NETWORK_INFO, + NETWORK_INFO_2, + ROBOROCK_RRUID, + USER_EMAIL, +) from tests.common import MockConfigEntry from tests.typing import ClientSessionGenerator @@ -174,7 +181,7 @@ async def test_remove_from_hass( bypass_api_fixture, setup_entry: MockConfigEntry, hass_client: ClientSessionGenerator, - cleanup_map_storage: pathlib.Path, + storage_path: pathlib.Path, ) -> None: """Test that removing from hass removes any existing images.""" @@ -184,17 +191,18 @@ async def test_remove_from_hass( resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") assert resp.status == HTTPStatus.OK - assert not cleanup_map_storage.exists() + config_entry_storage = storage_path / setup_entry.entry_id + assert not config_entry_storage.exists() # Flush to disk await hass.config_entries.async_unload(setup_entry.entry_id) - assert cleanup_map_storage.exists() - paths = list(cleanup_map_storage.walk()) + assert config_entry_storage.exists() + paths = list(config_entry_storage.walk()) assert len(paths) == 4 # Two map image and two directories await hass.config_entries.async_remove(setup_entry.entry_id) # After removal, directories should be empty. - assert not cleanup_map_storage.exists() + assert not config_entry_storage.exists() @pytest.mark.parametrize("platforms", [[Platform.IMAGE]]) @@ -202,7 +210,7 @@ async def test_oserror_remove_image( hass: HomeAssistant, bypass_api_fixture, setup_entry: MockConfigEntry, - cleanup_map_storage: pathlib.Path, + storage_path: pathlib.Path, hass_client: ClientSessionGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -215,11 +223,12 @@ async def test_oserror_remove_image( assert resp.status == HTTPStatus.OK # Image content is saved when unloading - assert not cleanup_map_storage.exists() + config_entry_storage = storage_path / setup_entry.entry_id + assert not config_entry_storage.exists() await hass.config_entries.async_unload(setup_entry.entry_id) - assert cleanup_map_storage.exists() - paths = list(cleanup_map_storage.walk()) + assert config_entry_storage.exists() + paths = list(config_entry_storage.walk()) assert len(paths) == 4 # Two map image and two directories with patch( @@ -298,6 +307,7 @@ async def test_no_user_agreement( assert mock_roborock_entry.error_reason_translation_key == "no_user_agreement" +@pytest.mark.parametrize("platforms", [[Platform.SENSOR]]) async def test_stale_device( hass: HomeAssistant, bypass_api_fixture, @@ -339,6 +349,7 @@ async def test_stale_device( # therefore not deleted. 
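The `test_migrate_config_entry_unique_id` case added a little further below creates a version 1 / minor_version 1 entry whose unique id is still the account e-mail and expects setup to leave the entry loaded with its unique id rewritten to `ROBOROCK_RRUID`. The sketch below shows the general shape of a unique-id migration hook that would satisfy that test; it is an illustration of the pattern under that assumption, not the integration's actual migration code.

```python
# Illustrative only: the kind of unique_id migration the test below exercises.
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Migrate old config entries to the current minor version."""
    if entry.version == 1 and entry.minor_version == 1:
        # Replace the e-mail based unique id with the stable Roborock user id
        # ("rruid") kept inside the stored user data.
        rruid = entry.data["user_data"]["rruid"]
        hass.config_entries.async_update_entry(entry, unique_id=rruid, minor_version=2)
    return True
```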
+@pytest.mark.parametrize("platforms", [[Platform.SENSOR]]) async def test_no_stale_device( hass: HomeAssistant, bypass_api_fixture, @@ -367,3 +378,25 @@ async def test_no_stale_device( mock_roborock_entry.entry_id ) assert len(new_devices) == 6 # 2 for each robot, 1 for A01, 1 for Zeo + + +async def test_migrate_config_entry_unique_id( + hass: HomeAssistant, + bypass_api_fixture, + config_entry_data: dict[str, Any], +) -> None: + """Test migrating the config entry unique id.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=USER_EMAIL, + data=config_entry_data, + version=1, + minor_version=1, + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert config_entry.state is ConfigEntryState.LOADED + assert config_entry.unique_id == ROBOROCK_RRUID diff --git a/tests/components/roku/test_binary_sensor.py b/tests/components/roku/test_binary_sensor.py index ad27a857101..c3aec4f0968 100644 --- a/tests/components/roku/test_binary_sensor.py +++ b/tests/components/roku/test_binary_sensor.py @@ -50,7 +50,7 @@ async def test_roku_binary_sensors( assert entry.unique_id == f"{UPNP_SERIAL}_supports_ethernet" assert entry.entity_category == EntityCategory.DIAGNOSTIC assert state.state == STATE_ON - assert state.attributes.get(ATTR_FRIENDLY_NAME) == "My Roku 3 Supports ethernet" + assert state.attributes.get(ATTR_FRIENDLY_NAME) == "My Roku 3 Supports Ethernet" assert ATTR_DEVICE_CLASS not in state.attributes state = hass.states.get("binary_sensor.my_roku_3_supports_find_remote") @@ -125,7 +125,7 @@ async def test_rokutv_binary_sensors( assert entry.entity_category == EntityCategory.DIAGNOSTIC assert state.state == STATE_ON assert ( - state.attributes.get(ATTR_FRIENDLY_NAME) == '58" Onn Roku TV Supports ethernet' + state.attributes.get(ATTR_FRIENDLY_NAME) == '58" Onn Roku TV Supports Ethernet' ) assert ATTR_DEVICE_CLASS not in state.attributes diff --git a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index b162200f95e..9666e29579b 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -24,6 +24,7 @@ from homeassistant.components.sensor import ( async_rounded_state, async_update_suggested_units, ) +from homeassistant.components.sensor.const import STATE_CLASS_UNITS from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, @@ -2005,6 +2006,7 @@ async def test_non_numeric_device_class_with_unit_of_measurement( SensorDeviceClass.VOLUME, SensorDeviceClass.WATER, SensorDeviceClass.WEIGHT, + SensorDeviceClass.WIND_DIRECTION, SensorDeviceClass.WIND_SPEED, ], ) @@ -2035,6 +2037,37 @@ async def test_device_classes_with_invalid_unit_of_measurement( ) in caplog.text +@pytest.mark.parametrize( + "state_class", + [SensorStateClass.MEASUREMENT_ANGLE], +) +async def test_state_classes_with_invalid_unit_of_measurement( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + state_class: SensorStateClass, +) -> None: + """Test error when unit of measurement is not valid for used state class.""" + entity0 = MockSensor( + name="Test", + native_value="1.0", + state_class=state_class, + native_unit_of_measurement="INVALID!", + ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) + units = { + str(unit) if unit else "no unit of measurement" + for unit in STATE_CLASS_UNITS.get(state_class, set()) + } + assert 
await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) + await hass.async_block_till_done() + + assert ( + f"Sensor sensor.test ({entity0.__class__}) is using native unit of " + "measurement 'INVALID!' which is not a valid unit " + f"for the state class ('{state_class}') it is using; expected one of {units};" + ) in caplog.text + + @pytest.mark.parametrize( ("device_class", "state_class", "unit"), [ diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 1dd8fb4905a..962c0a0ef8f 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -1,7 +1,8 @@ """The tests for sensor recorder platform.""" -from collections.abc import Iterable +from collections.abc import Callable, Iterable from datetime import datetime, timedelta +import logging import math from statistics import mean from typing import Any, Literal @@ -26,17 +27,30 @@ from homeassistant.components.recorder.db_schema import ( ) from homeassistant.components.recorder.models import ( StatisticData, + StatisticMeanType, StatisticMetaData, process_timestamp, ) from homeassistant.components.recorder.statistics import ( + DEG_TO_RAD, + RAD_TO_DEG, async_import_statistics, get_metadata, list_statistic_ids, ) from homeassistant.components.recorder.util import get_instance, session_scope -from homeassistant.components.sensor import ATTR_OPTIONS, DOMAIN, SensorDeviceClass -from homeassistant.const import ATTR_FRIENDLY_NAME, STATE_UNAVAILABLE +from homeassistant.components.sensor import ( + ATTR_OPTIONS, + DOMAIN, + SensorDeviceClass, + SensorStateClass, +) +from homeassistant.components.sensor.recorder import ( + MEAN_TYPE_CHANGED_ISSUE, + STATE_CLASS_REMOVED_ISSUE, + UNITS_CHANGED_ISSUE, +) +from homeassistant.const import ATTR_FRIENDLY_NAME, DEGREE, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant, State from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component @@ -98,6 +112,13 @@ KW_SENSOR_ATTRIBUTES = { "state_class": "measurement", "unit_of_measurement": "kW", } +WIND_DIRECTION_ATTRIBUTES = { + "device_class": SensorDeviceClass.WIND_DIRECTION, + "state_class": SensorStateClass.MEASUREMENT_ANGLE, + "unit_of_measurement": DEGREE, +} +WIND_DIRECTION_STATES_SEQ = [350, 0, 15] +TEMP_STATES_SEQ = [-10, 15, 30, 60] @pytest.fixture @@ -281,6 +302,7 @@ async def test_compile_hourly_statistics( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -306,6 +328,64 @@ async def test_compile_hourly_statistics( assert "Error while processing event StatisticsTask" not in caplog.text +async def test_compile_hourly_statistics_angle( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test compiling hourly statistics for measurement_angle.""" + zero = get_start_time(dt_util.utcnow()) + await async_setup_component(hass, "sensor", {}) + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + with freeze_time(zero) as freezer: + four, states = await async_record_states( + hass, + freezer, + zero, + "sensor.test1", + WIND_DIRECTION_ATTRIBUTES, + seq=WIND_DIRECTION_STATES_SEQ, + ) + await async_wait_recording_done(hass) + hist = history.get_significant_states( + hass, zero, four, hass.states.async_entity_ids() + ) + 
assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + do_adhoc_statistics(hass, start=zero) + await async_wait_recording_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.test1", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + stats = statistics_during_period(hass, zero, period="5minute") + assert stats == { + "sensor.test1": [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(0.5802544), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] + } + assert "Error while processing event StatisticsTask" not in caplog.text + + @pytest.mark.parametrize( ( "device_class", @@ -349,7 +429,7 @@ async def test_compile_hourly_statistics_with_some_same_last_updated( "unit_of_measurement": state_unit, } attributes = dict(attributes) - seq = [-10, 15, 30, 60] + seq = TEMP_STATES_SEQ async def set_state(entity_id, state, **kwargs): """Set the state.""" @@ -395,6 +475,7 @@ async def test_compile_hourly_statistics_with_some_same_last_updated( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -420,33 +501,167 @@ async def test_compile_hourly_statistics_with_some_same_last_updated( assert "Error while processing event StatisticsTask" not in caplog.text +async def test_compile_hourly_statistics_with_some_same_last_updated_angle( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test compiling hourly statistics with the some of the same last updated value for measurement_angle. + + If the last updated value is the same we will have a zero duration. 
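The wind-direction tests above and below feed angle sequences such as `[350, 0, 15]` (expected mean ≈ 0.58°) and `[350, 2, 15, 345]` (expected mean ≈ 6.27°) through the new `MEASUREMENT_ANGLE` path, with `min`/`max` reported as `None`. The sketch below shows the underlying circular (vector) mean with equal weights; the recorder additionally weights each angle by how long the state was held, which is why the duration-weighted expectations in these tests differ from the unweighted values printed here. It is a clarifying illustration, not the recorder's implementation.

```python
# Equal-weight circular mean of compass angles in degrees: average the unit
# vectors and convert the resulting direction back to [0, 360).
import math


def circular_mean(degrees: list[float]) -> float:
    """Average angles on a circle by averaging their unit vectors."""
    sin_sum = sum(math.sin(math.radians(d)) for d in degrees)
    cos_sum = sum(math.cos(math.radians(d)) for d in degrees)
    return math.degrees(math.atan2(sin_sum, cos_sum)) % 360


# Angles that straddle north average to a value near 0/360, not to ~120.
print(round(circular_mean([350, 0, 15]), 2))       # 1.65
print(round(circular_mean([350, 2, 15, 345]), 2))  # 357.97
```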
+ """ + zero = get_start_time(dt_util.utcnow()) + await async_setup_component(hass, "sensor", {}) + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + entity_id = "sensor.test1" + seq = [350, 2, 15, 345] + + async def set_state(entity_id, state, **kwargs): + """Set the state.""" + hass.states.async_set(entity_id, state, **kwargs) + await async_wait_recording_done(hass) + return hass.states.get(entity_id) + + one = zero + timedelta(seconds=1 * 5) + two = one + timedelta(seconds=10 * 5) + three = two + timedelta(seconds=40 * 5) + four = three + timedelta(seconds=10 * 5) + + states = {entity_id: []} + with freeze_time(one) as freezer: + states[entity_id].append( + await set_state( + entity_id, str(seq[0]), attributes=WIND_DIRECTION_ATTRIBUTES + ) + ) + + # Record two states at the exact same time + freezer.move_to(two) + states[entity_id].append( + await set_state( + entity_id, str(seq[1]), attributes=WIND_DIRECTION_ATTRIBUTES + ) + ) + states[entity_id].append( + await set_state( + entity_id, str(seq[2]), attributes=WIND_DIRECTION_ATTRIBUTES + ) + ) + + freezer.move_to(three) + states[entity_id].append( + await set_state( + entity_id, str(seq[3]), attributes=WIND_DIRECTION_ATTRIBUTES + ) + ) + + hist = history.get_significant_states( + hass, zero, four, hass.states.async_entity_ids() + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + do_adhoc_statistics(hass, start=zero) + await async_wait_recording_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.test1", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + stats = statistics_during_period(hass, zero, period="5minute") + assert stats == { + "sensor.test1": [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(6.274605), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] + } + assert "Error while processing event StatisticsTask" not in caplog.text + + @pytest.mark.parametrize( ( - "device_class", - "state_unit", + "attributes", "display_unit", "statistics_unit", "unit_class", "mean", "min", "max", + "mean_type", + "seq", ), [ - ("temperature", "°C", "°C", "°C", "temperature", 60, -10, 60), - ("temperature", "°F", "°F", "°F", "temperature", 60, -10, 60), + ( + { + "device_class": "temperature", + "state_class": "measurement", + "unit_of_measurement": "°C", + }, + "°C", + "°C", + "temperature", + 60, + -10, + 60, + StatisticMeanType.ARITHMETIC, + TEMP_STATES_SEQ, + ), + ( + { + "device_class": "temperature", + "state_class": "measurement", + "unit_of_measurement": "°F", + }, + "°F", + "°F", + "temperature", + 60, + -10, + 60, + StatisticMeanType.ARITHMETIC, + TEMP_STATES_SEQ, + ), + ( + WIND_DIRECTION_ATTRIBUTES, + DEGREE, + DEGREE, + None, + 15, + None, + None, + StatisticMeanType.CIRCULAR, + [350, 0, 355, 15], + ), ], ) async def test_compile_hourly_statistics_with_all_same_last_updated( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - device_class, - state_unit, - display_unit, - statistics_unit, - unit_class, - mean, - min, - max, + attributes: dict[str, Any], + display_unit: str, + statistics_unit: str, + unit_class: str | None, + mean: float | None, + min: 
float | None, + max: float | None, + mean_type: StatisticMeanType, + seq: list[float], ) -> None: """Test compiling hourly statistics with the all of the same last updated value. @@ -457,13 +672,6 @@ async def test_compile_hourly_statistics_with_all_same_last_updated( # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) entity_id = "sensor.test1" - attributes = { - "device_class": device_class, - "state_class": "measurement", - "unit_of_measurement": state_unit, - } - attributes = dict(attributes) - seq = [-10, 15, 30, 60] async def set_state(entity_id, state, **kwargs): """Set the state.""" @@ -503,7 +711,8 @@ async def test_compile_hourly_statistics_with_all_same_last_updated( { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, - "has_mean": True, + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": False, "name": None, "source": "recorder", @@ -531,31 +740,72 @@ async def test_compile_hourly_statistics_with_all_same_last_updated( @pytest.mark.parametrize( ( - "device_class", - "state_unit", + "attributes", "display_unit", "statistics_unit", "unit_class", "mean", "min", "max", + "mean_type", + "seq", ), [ - ("temperature", "°C", "°C", "°C", "temperature", 60, -10, 60), - ("temperature", "°F", "°F", "°F", "temperature", 60, -10, 60), + ( + { + "device_class": "temperature", + "state_class": "measurement", + "unit_of_measurement": "°C", + }, + "°C", + "°C", + "temperature", + 60, + -10, + 60, + StatisticMeanType.ARITHMETIC, + TEMP_STATES_SEQ, + ), + ( + { + "device_class": "temperature", + "state_class": "measurement", + "unit_of_measurement": "°F", + }, + "°F", + "°F", + "temperature", + 60, + -10, + 60, + StatisticMeanType.ARITHMETIC, + TEMP_STATES_SEQ, + ), + ( + WIND_DIRECTION_ATTRIBUTES, + DEGREE, + DEGREE, + None, + 15, + None, + None, + StatisticMeanType.CIRCULAR, + [350, 0, 355, 15], + ), ], ) async def test_compile_hourly_statistics_only_state_is_at_end_of_period( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - device_class, - state_unit, - display_unit, - statistics_unit, - unit_class, - mean, - min, - max, + attributes: dict[str, Any], + display_unit: str, + statistics_unit: str, + unit_class: str | None, + mean: float | None, + min: float | None, + max: float | None, + mean_type: StatisticMeanType, + seq: list[float], ) -> None: """Test compiling hourly statistics when the only states are at end of period.""" zero = get_start_time(dt_util.utcnow()) @@ -563,13 +813,6 @@ async def test_compile_hourly_statistics_only_state_is_at_end_of_period( # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) entity_id = "sensor.test1" - attributes = { - "device_class": device_class, - "state_class": "measurement", - "unit_of_measurement": state_unit, - } - attributes = dict(attributes) - seq = [-10, 15, 30, 60] async def set_state(entity_id, state, **kwargs): """Set the state.""" @@ -611,7 +854,8 @@ async def test_compile_hourly_statistics_only_state_is_at_end_of_period( { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, - "has_mean": True, + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": False, "name": None, "source": "recorder", @@ -695,6 +939,7 @@ async def test_compile_hourly_statistics_purged_state_changes( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": 
False, "name": None, "source": "recorder", @@ -781,6 +1026,7 @@ async def test_compile_hourly_statistics_ignore_future_state( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -873,6 +1119,7 @@ async def test_compile_hourly_statistics_wrong_unit( "statistic_id": "sensor.test1", "display_unit_of_measurement": "°C", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -882,6 +1129,7 @@ async def test_compile_hourly_statistics_wrong_unit( { "display_unit_of_measurement": "invalid", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -892,6 +1140,7 @@ async def test_compile_hourly_statistics_wrong_unit( { "display_unit_of_measurement": None, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -903,6 +1152,7 @@ async def test_compile_hourly_statistics_wrong_unit( "statistic_id": "sensor.test6", "display_unit_of_measurement": "°C", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -913,6 +1163,7 @@ async def test_compile_hourly_statistics_wrong_unit( "statistic_id": "sensor.test7", "display_unit_of_measurement": "°C", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -1084,6 +1335,7 @@ async def test_compile_hourly_sum_statistics_amount( "statistic_id": "sensor.test1", "display_unit_of_measurement": statistics_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1288,6 +1540,7 @@ async def test_compile_hourly_sum_statistics_amount_reset_every_state_change( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1397,6 +1650,7 @@ async def test_compile_hourly_sum_statistics_amount_invalid_last_reset( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1493,6 +1747,7 @@ async def test_compile_hourly_sum_statistics_nan_inf_state( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1636,6 +1891,7 @@ async def test_compile_hourly_sum_statistics_negative_state( assert { "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1737,6 +1993,7 @@ async def test_compile_hourly_sum_statistics_total_no_reset( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1850,6 +2107,7 @@ async def test_compile_hourly_sum_statistics_total_increasing( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1976,6 +2234,7 @@ async def 
test_compile_hourly_sum_statistics_total_increasing_small_dip( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -2080,6 +2339,7 @@ async def test_compile_hourly_energy_statistics_unsupported( "statistic_id": "sensor.test1", "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -2182,6 +2442,7 @@ async def test_compile_hourly_energy_statistics_multiple( "statistic_id": "sensor.test1", "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -2192,6 +2453,7 @@ async def test_compile_hourly_energy_statistics_multiple( "statistic_id": "sensor.test2", "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -2202,6 +2464,7 @@ async def test_compile_hourly_energy_statistics_multiple( "statistic_id": "sensor.test3", "display_unit_of_measurement": "Wh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -2384,8 +2647,64 @@ async def test_compile_hourly_statistics_unchanged( assert "Error while processing event StatisticsTask" not in caplog.text +async def test_compile_hourly_statistics_unchanged_angle( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test compiling hourly statistics, with no changes during the hour for measurement_angle.""" + zero = get_start_time(dt_util.utcnow()) + await async_setup_component(hass, "sensor", {}) + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + with freeze_time(zero) as freezer: + four, states = await async_record_states( + hass, + freezer, + zero, + "sensor.test1", + WIND_DIRECTION_ATTRIBUTES, + seq=WIND_DIRECTION_STATES_SEQ, + ) + await async_wait_recording_done(hass) + hist = history.get_significant_states( + hass, zero, four, hass.states.async_entity_ids() + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + do_adhoc_statistics(hass, start=four) + await async_wait_recording_done(hass) + stats = statistics_during_period(hass, four, period="5minute") + assert stats == { + "sensor.test1": [ + { + "start": process_timestamp(four).timestamp(), + "end": process_timestamp(four + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(15), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] + } + assert "Error while processing event StatisticsTask" not in caplog.text + + +@pytest.mark.parametrize( + ("attributes", "expected_mean", "expected_min", "expected_max"), + [ + (TEMPERATURE_SENSOR_ATTRIBUTES, 21.1864406779661, 10.0, 25.0), + (WIND_DIRECTION_ATTRIBUTES, 21.202479155239875, None, None), + ], +) async def test_compile_hourly_statistics_partially_unavailable( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + attributes: dict, + expected_mean: float, + expected_min: float | None, + expected_max: float | None, ) -> None: """Test compiling hourly statistics, with the sensor being partially unavailable.""" zero = get_start_time(dt_util.utcnow()) @@ -2393,7 +2712,7 @@ async def test_compile_hourly_statistics_partially_unavailable( # Wait for the sensor recorder 
platform to be added await async_recorder_block_till_done(hass) four, states = await async_record_states_partially_unavailable( - hass, zero, "sensor.test1", TEMPERATURE_SENSOR_ATTRIBUTES + hass, zero, "sensor.test1", attributes ) await async_wait_recording_done(hass) hist = history.get_significant_states( @@ -2409,9 +2728,9 @@ async def test_compile_hourly_statistics_partially_unavailable( { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), - "mean": pytest.approx(21.1864406779661), - "min": pytest.approx(10.0), - "max": pytest.approx(25.0), + "mean": pytest.approx(expected_mean), + "min": pytest.approx(expected_min), + "max": pytest.approx(expected_max), "last_reset": None, "state": None, "sum": None, @@ -2502,6 +2821,58 @@ async def test_compile_hourly_statistics_unavailable( assert "Error while processing event StatisticsTask" not in caplog.text +async def test_compile_hourly_statistics_unavailable_angle( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test compiling hourly statistics, with one sensor being unavailable for measurement_angle. + + sensor.test1 is unavailable and should not have statistics generated + sensor.test2 should have statistics generated + """ + zero = get_start_time(dt_util.utcnow()) + await async_setup_component(hass, "sensor", {}) + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + four, states = await async_record_states_partially_unavailable( + hass, zero, "sensor.test1", WIND_DIRECTION_ATTRIBUTES + ) + with freeze_time(zero) as freezer: + _, _states = await async_record_states( + hass, + freezer, + zero, + "sensor.test2", + WIND_DIRECTION_ATTRIBUTES, + seq=WIND_DIRECTION_STATES_SEQ, + ) + await async_wait_recording_done(hass) + states = {**states, **_states} + hist = history.get_significant_states( + hass, zero, four, hass.states.async_entity_ids() + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + do_adhoc_statistics(hass, start=four) + await async_wait_recording_done(hass) + stats = statistics_during_period(hass, four, period="5minute") + assert stats == { + "sensor.test2": [ + { + "start": process_timestamp(four).timestamp(), + "end": process_timestamp(four + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(15), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] + } + assert "Error while processing event StatisticsTask" not in caplog.text + + async def test_compile_hourly_statistics_fails( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -2530,59 +2901,267 @@ async def test_compile_hourly_statistics_fails( "statistic_type", ), [ - ("measurement", "area", "m²", "m²", "m²", "area", "mean"), - ("measurement", "area", "mi²", "mi²", "mi²", "area", "mean"), + ("measurement", "area", "m²", "m²", "m²", "area", StatisticMeanType.ARITHMETIC), + ( + "measurement", + "area", + "mi²", + "mi²", + "mi²", + "area", + StatisticMeanType.ARITHMETIC, + ), ("total", "area", "m²", "m²", "m²", "area", "sum"), ("total", "area", "mi²", "mi²", "mi²", "area", "sum"), - ("measurement", "battery", "%", "%", "%", "unitless", "mean"), - ("measurement", "battery", None, None, None, "unitless", "mean"), - ("measurement", "distance", "m", "m", "m", "distance", "mean"), - ("measurement", "distance", "mi", "mi", "mi", "distance", "mean"), + ( + "measurement", + "battery", + "%", + "%", + "%", + "unitless", + StatisticMeanType.ARITHMETIC, + ), + ( + 
"measurement", + "battery", + None, + None, + None, + "unitless", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "distance", + "m", + "m", + "m", + "distance", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "distance", + "mi", + "mi", + "mi", + "distance", + StatisticMeanType.ARITHMETIC, + ), ("total", "distance", "m", "m", "m", "distance", "sum"), ("total", "distance", "mi", "mi", "mi", "distance", "sum"), ("total", "energy", "Wh", "Wh", "Wh", "energy", "sum"), ("total", "energy", "kWh", "kWh", "kWh", "energy", "sum"), - ("measurement", "energy", "Wh", "Wh", "Wh", "energy", "mean"), - ("measurement", "energy", "kWh", "kWh", "kWh", "energy", "mean"), - ("measurement", "humidity", "%", "%", "%", "unitless", "mean"), - ("measurement", "humidity", None, None, None, "unitless", "mean"), + ( + "measurement", + "energy", + "Wh", + "Wh", + "Wh", + "energy", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "energy", + "kWh", + "kWh", + "kWh", + "energy", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "humidity", + "%", + "%", + "%", + "unitless", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "humidity", + None, + None, + None, + "unitless", + StatisticMeanType.ARITHMETIC, + ), ("total", "monetary", "USD", "USD", "USD", None, "sum"), ("total", "monetary", "None", "None", "None", None, "sum"), ("total", "gas", "m³", "m³", "m³", "volume", "sum"), ("total", "gas", "ft³", "ft³", "ft³", "volume", "sum"), - ("measurement", "monetary", "USD", "USD", "USD", None, "mean"), - ("measurement", "monetary", "None", "None", "None", None, "mean"), - ("measurement", "gas", "m³", "m³", "m³", "volume", "mean"), - ("measurement", "gas", "ft³", "ft³", "ft³", "volume", "mean"), - ("measurement", "pressure", "Pa", "Pa", "Pa", "pressure", "mean"), - ("measurement", "pressure", "hPa", "hPa", "hPa", "pressure", "mean"), - ("measurement", "pressure", "mbar", "mbar", "mbar", "pressure", "mean"), - ("measurement", "pressure", "inHg", "inHg", "inHg", "pressure", "mean"), - ("measurement", "pressure", "psi", "psi", "psi", "pressure", "mean"), - ("measurement", "speed", "m/s", "m/s", "m/s", "speed", "mean"), - ("measurement", "speed", "mph", "mph", "mph", "speed", "mean"), - ("measurement", "temperature", "°C", "°C", "°C", "temperature", "mean"), - ("measurement", "temperature", "°F", "°F", "°F", "temperature", "mean"), - ("measurement", "volume", "m³", "m³", "m³", "volume", "mean"), - ("measurement", "volume", "ft³", "ft³", "ft³", "volume", "mean"), + ( + "measurement", + "monetary", + "USD", + "USD", + "USD", + None, + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "monetary", + "None", + "None", + "None", + None, + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "gas", + "m³", + "m³", + "m³", + "volume", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "gas", + "ft³", + "ft³", + "ft³", + "volume", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "Pa", + "Pa", + "Pa", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "hPa", + "hPa", + "hPa", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "mbar", + "mbar", + "mbar", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "inHg", + "inHg", + "inHg", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "psi", + "psi", + "psi", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "speed", + "m/s", + 
"m/s", + "m/s", + "speed", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "speed", + "mph", + "mph", + "mph", + "speed", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "temperature", + "°C", + "°C", + "°C", + "temperature", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "temperature", + "°F", + "°F", + "°F", + "temperature", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "volume", + "m³", + "m³", + "m³", + "volume", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "volume", + "ft³", + "ft³", + "ft³", + "volume", + StatisticMeanType.ARITHMETIC, + ), ("total", "volume", "m³", "m³", "m³", "volume", "sum"), ("total", "volume", "ft³", "ft³", "ft³", "volume", "sum"), - ("measurement", "weight", "g", "g", "g", "mass", "mean"), - ("measurement", "weight", "oz", "oz", "oz", "mass", "mean"), + ("measurement", "weight", "g", "g", "g", "mass", StatisticMeanType.ARITHMETIC), + ( + "measurement", + "weight", + "oz", + "oz", + "oz", + "mass", + StatisticMeanType.ARITHMETIC, + ), ("total", "weight", "g", "g", "g", "mass", "sum"), ("total", "weight", "oz", "oz", "oz", "mass", "sum"), + ( + SensorStateClass.MEASUREMENT_ANGLE, + SensorDeviceClass.WIND_DIRECTION, + DEGREE, + DEGREE, + DEGREE, + None, + StatisticMeanType.CIRCULAR, + ), ], ) async def test_list_statistic_ids( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - state_class, - device_class, - state_unit, - display_unit, - statistics_unit, - unit_class, - statistic_type, + state_class: str | SensorStateClass, + device_class: str | SensorDeviceClass, + state_unit: str, + display_unit: str, + statistics_unit: str, + unit_class: str | None, + statistic_type: str | StatisticMeanType, ) -> None: """Test listing future statistic ids.""" await async_setup_component(hass, "sensor", {}) @@ -2596,11 +3175,20 @@ async def test_list_statistic_ids( } hass.states.async_set("sensor.test1", 0, attributes=attributes) statistic_ids = await async_list_statistic_ids(hass) + mean_type = ( + statistic_type + if isinstance(statistic_type, StatisticMeanType) + else StatisticMeanType.NONE + ) + statistic_type = ( + statistic_type if not isinstance(statistic_type, StatisticMeanType) else "mean" + ) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, - "has_mean": statistic_type == "mean", + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": statistic_type == "sum", "name": None, "source": "recorder", @@ -2608,6 +3196,7 @@ async def test_list_statistic_ids( "unit_class": unit_class, }, ] + for stat_type in ("mean", "sum", "dogs"): statistic_ids = await async_list_statistic_ids(hass, statistic_type=stat_type) if statistic_type == stat_type: @@ -2615,7 +3204,8 @@ async def test_list_statistic_ids( { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, - "has_mean": statistic_type == "mean", + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": statistic_type == "sum", "name": None, "source": "recorder", @@ -2723,6 +3313,7 @@ async def test_compile_hourly_statistics_changing_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2758,6 +3349,7 @@ async def test_compile_hourly_statistics_changing_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + 
"mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2849,6 +3441,7 @@ async def test_compile_hourly_statistics_changing_units_2( "statistic_id": "sensor.test1", "display_unit_of_measurement": "cats", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2931,6 +3524,7 @@ async def test_compile_hourly_statistics_changing_units_3( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2966,6 +3560,7 @@ async def test_compile_hourly_statistics_changing_units_3( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3048,6 +3643,7 @@ async def test_compile_hourly_statistics_convert_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit_1, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3095,6 +3691,7 @@ async def test_compile_hourly_statistics_convert_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit_2, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3211,6 +3808,7 @@ async def test_compile_hourly_statistics_equivalent_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3242,6 +3840,7 @@ async def test_compile_hourly_statistics_equivalent_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit2, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3333,6 +3932,7 @@ async def test_compile_hourly_statistics_equivalent_units_2( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3417,6 +4017,7 @@ async def test_compile_hourly_statistics_changing_device_class_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3466,6 +4067,7 @@ async def test_compile_hourly_statistics_changing_device_class_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3525,6 +4127,7 @@ async def test_compile_hourly_statistics_changing_device_class_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3629,6 +4232,7 @@ async def test_compile_hourly_statistics_changing_device_class_2( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3678,6 +4282,7 @@ async def test_compile_hourly_statistics_changing_device_class_2( "statistic_id": "sensor.test1", 
"display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3717,15 +4322,13 @@ async def test_compile_hourly_statistics_changing_device_class_2( ( "device_class", "state_unit", - "display_unit", - "statistics_unit", "unit_class", "mean", "min", "max", ), [ - (None, None, None, None, "unitless", 13.050847, -10, 30), + (None, None, "unitless", 13.050847, -10, 30), ], ) async def test_compile_hourly_statistics_changing_state_class( @@ -3733,8 +4336,6 @@ async def test_compile_hourly_statistics_changing_state_class( caplog: pytest.LogCaptureFixture, device_class, state_unit, - display_unit, - statistics_unit, unit_class, mean, min, @@ -3770,6 +4371,7 @@ async def test_compile_hourly_statistics_changing_state_class( "statistic_id": "sensor.test1", "display_unit_of_measurement": None, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3783,6 +4385,7 @@ async def test_compile_hourly_statistics_changing_state_class( 1, { "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3812,6 +4415,7 @@ async def test_compile_hourly_statistics_changing_state_class( "statistic_id": "sensor.test1", "display_unit_of_measurement": None, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -3825,6 +4429,7 @@ async def test_compile_hourly_statistics_changing_state_class( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -3890,10 +4495,11 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "unit_of_measurement": "EUR", } + durations = [50, 200, 45] + def _weighted_average(seq, i, last_state): total = 0 duration = 0 - durations = [50, 200, 45] if i > 0: total += last_state * 5 duration += 5 @@ -3902,6 +4508,24 @@ async def test_compile_statistics_hourly_daily_monthly_summary( duration += dur return total / duration + def _time_weighted_circular_mean(values: list[tuple[float, int]]): + sin_sum = 0 + cos_sum = 0 + for x, dur in values: + sin_sum += math.sin(x * DEG_TO_RAD) * dur + cos_sum += math.cos(x * DEG_TO_RAD) * dur + + return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360 + + def _circular_mean(values: list[float]) -> float: + sin_sum = 0 + cos_sum = 0 + for x in values: + sin_sum += math.sin(x * DEG_TO_RAD) + cos_sum += math.cos(x * DEG_TO_RAD) + + return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360 + def _min(seq, last_state): if last_state is None: return min(seq) @@ -3923,17 +4547,24 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "sensor.test2": [], "sensor.test3": [], "sensor.test4": [], + "sensor.test5": [], } expected_minima = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} expected_maxima = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} - expected_averages = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} + expected_means = { + "sensor.test1": [], + "sensor.test2": [], + "sensor.test3": [], + "sensor.test5": [], + } expected_states = {"sensor.test4": []} expected_sums = {"sensor.test4": []} - last_states = { + last_states: dict[str, float | None] = { "sensor.test1": None, "sensor.test2": None, "sensor.test3": None, "sensor.test4": None, + "sensor.test5": None, } start = zero for i in range(24): @@ -3946,7 +4577,7 @@ async def 
test_compile_statistics_hourly_daily_monthly_summary( last_state = last_states["sensor.test1"] expected_minima["sensor.test1"].append(_min(seq, last_state)) expected_maxima["sensor.test1"].append(_max(seq, last_state)) - expected_averages["sensor.test1"].append(_weighted_average(seq, i, last_state)) + expected_means["sensor.test1"].append(_weighted_average(seq, i, last_state)) last_states["sensor.test1"] = seq[-1] # test2 values change: min/max at the last state seq = [-10 * (i + 1), 15 * (i + 1), 30 * (i + 1)] @@ -3957,7 +4588,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( last_state = last_states["sensor.test2"] expected_minima["sensor.test2"].append(_min(seq, last_state)) expected_maxima["sensor.test2"].append(_max(seq, last_state)) - expected_averages["sensor.test2"].append(_weighted_average(seq, i, last_state)) + expected_means["sensor.test2"].append(_weighted_average(seq, i, last_state)) last_states["sensor.test2"] = seq[-1] # test3 values change: min/max at the first state seq = [-10 * (23 - i + 1), 15 * (23 - i + 1), 30 * (23 - i + 1)] @@ -3968,7 +4599,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( last_state = last_states["sensor.test3"] expected_minima["sensor.test3"].append(_min(seq, last_state)) expected_maxima["sensor.test3"].append(_max(seq, last_state)) - expected_averages["sensor.test3"].append(_weighted_average(seq, i, last_state)) + expected_means["sensor.test3"].append(_weighted_average(seq, i, last_state)) last_states["sensor.test3"] = seq[-1] # test4 values grow seq = [i, i + 0.5, i + 0.75] @@ -3991,6 +4622,18 @@ async def test_compile_statistics_hourly_daily_monthly_summary( ) last_states["sensor.test4"] = seq[-1] + # test5 circular mean + seq = [350 - i, 0 + (i / 2.0), 15 + i] + four, _states = await async_record_states( + hass, freezer, start, "sensor.test5", WIND_DIRECTION_ATTRIBUTES, seq + ) + states["sensor.test5"] += _states["sensor.test5"] + values = [(seq, durations[j]) for j, seq in enumerate(seq)] + if (state := last_states["sensor.test5"]) is not None: + values.append((state, 5)) + expected_means["sensor.test5"].append(_time_weighted_circular_mean(values)) + last_states["sensor.test5"] = seq[-1] + start += timedelta(minutes=5) await async_wait_recording_done(hass) hist = history.get_significant_states( @@ -4016,6 +4659,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "statistic_id": "sensor.test1", "display_unit_of_measurement": "%", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -4026,6 +4670,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "statistic_id": "sensor.test2", "display_unit_of_measurement": "%", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -4036,6 +4681,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "statistic_id": "sensor.test3", "display_unit_of_measurement": "%", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -4046,12 +4692,24 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "statistic_id": "sensor.test4", "display_unit_of_measurement": "EUR", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": "EUR", "unit_class": None, }, + { + "statistic_id": "sensor.test5", + "display_unit_of_measurement": DEGREE, + 
"has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + }, ] # Adjust the inserted statistics @@ -4070,6 +4728,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "sensor.test2": [], "sensor.test3": [], "sensor.test4": [], + "sensor.test5": [], } start = zero end = zero + timedelta(minutes=5) @@ -4079,11 +4738,10 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "sensor.test2", "sensor.test3", "sensor.test4", + "sensor.test5", ): expected_average = ( - expected_averages[entity_id][i] - if entity_id in expected_averages - else None + expected_means[entity_id][i] if entity_id in expected_means else None ) expected_minimum = ( expected_minima[entity_id][i] if entity_id in expected_minima else None @@ -4113,176 +4771,78 @@ async def test_compile_statistics_hourly_daily_monthly_summary( end += timedelta(minutes=5) assert stats == expected_stats - stats = statistics_during_period(hass, zero, period="hour") - expected_stats = { - "sensor.test1": [], - "sensor.test2": [], - "sensor.test3": [], - "sensor.test4": [], - } - start = zero - end = zero + timedelta(hours=1) - for i in range(2): - for entity_id in ( - "sensor.test1", - "sensor.test2", - "sensor.test3", - "sensor.test4", - ): - expected_average = ( - mean(expected_averages[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_averages - else None - ) - expected_minimum = ( - min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_minima - else None - ) - expected_maximum = ( - max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_maxima - else None - ) - expected_state = ( - expected_states[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_states - else None - ) - expected_sum = ( - expected_sums[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_sums - else None - ) - expected_stats[entity_id].append( - { - "start": process_timestamp(start).timestamp(), - "end": process_timestamp(end).timestamp(), - "mean": pytest.approx(expected_average), - "min": pytest.approx(expected_minimum), - "max": pytest.approx(expected_maximum), - "last_reset": None, - "state": expected_state, - "sum": expected_sum, - } - ) - start += timedelta(hours=1) - end += timedelta(hours=1) - assert stats == expected_stats + def verify_stats( + period: Literal["5minute", "day", "hour", "week", "month"], + start: datetime, + next_datetime: Callable[[datetime], datetime], + ) -> None: + stats = statistics_during_period(hass, zero, period=period) + expected_stats = { + "sensor.test1": [], + "sensor.test2": [], + "sensor.test3": [], + "sensor.test4": [], + "sensor.test5": [], + } + end = next_datetime(start) + for i in range(2): + for entity_id, mean_fn in ( + ("sensor.test1", mean), + ("sensor.test2", mean), + ("sensor.test3", mean), + ("sensor.test4", mean), + ("sensor.test5", _circular_mean), + ): + expected_average = ( + mean_fn(expected_means[entity_id][i * 12 : (i + 1) * 12]) + if entity_id in expected_means + else None + ) + expected_minimum = ( + min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) + if entity_id in expected_minima + else None + ) + expected_maximum = ( + max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) + if entity_id in expected_maxima + else None + ) + expected_state = ( + expected_states[entity_id][(i + 1) * 12 - 1] + if entity_id in expected_states + else None + ) + expected_sum = ( + 
expected_sums[entity_id][(i + 1) * 12 - 1] + if entity_id in expected_sums + else None + ) + expected_stats[entity_id].append( + { + "start": process_timestamp(start).timestamp(), + "end": process_timestamp(end).timestamp(), + "mean": pytest.approx(expected_average), + "min": pytest.approx(expected_minimum), + "max": pytest.approx(expected_maximum), + "last_reset": None, + "state": expected_state, + "sum": expected_sum, + } + ) + start = next_datetime(start) + end = next_datetime(end) + assert stats == expected_stats + + verify_stats("hour", zero, lambda v: v + timedelta(hours=1)) - stats = statistics_during_period(hass, zero, period="day") - expected_stats = { - "sensor.test1": [], - "sensor.test2": [], - "sensor.test3": [], - "sensor.test4": [], - } start = dt_util.parse_datetime("2021-08-31T06:00:00+00:00") - end = start + timedelta(days=1) - for i in range(2): - for entity_id in ( - "sensor.test1", - "sensor.test2", - "sensor.test3", - "sensor.test4", - ): - expected_average = ( - mean(expected_averages[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_averages - else None - ) - expected_minimum = ( - min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_minima - else None - ) - expected_maximum = ( - max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_maxima - else None - ) - expected_state = ( - expected_states[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_states - else None - ) - expected_sum = ( - expected_sums[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_sums - else None - ) - expected_stats[entity_id].append( - { - "start": process_timestamp(start).timestamp(), - "end": process_timestamp(end).timestamp(), - "mean": pytest.approx(expected_average), - "min": pytest.approx(expected_minimum), - "max": pytest.approx(expected_maximum), - "last_reset": None, - "state": expected_state, - "sum": expected_sum, - } - ) - start += timedelta(days=1) - end += timedelta(days=1) - assert stats == expected_stats + assert start + verify_stats("day", start, lambda v: v + timedelta(days=1)) - stats = statistics_during_period(hass, zero, period="month") - expected_stats = { - "sensor.test1": [], - "sensor.test2": [], - "sensor.test3": [], - "sensor.test4": [], - } start = dt_util.parse_datetime("2021-08-01T06:00:00+00:00") - end = dt_util.parse_datetime("2021-09-01T06:00:00+00:00") - for i in range(2): - for entity_id in ( - "sensor.test1", - "sensor.test2", - "sensor.test3", - "sensor.test4", - ): - expected_average = ( - mean(expected_averages[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_averages - else None - ) - expected_minimum = ( - min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_minima - else None - ) - expected_maximum = ( - max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_maxima - else None - ) - expected_state = ( - expected_states[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_states - else None - ) - expected_sum = ( - expected_sums[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_sums - else None - ) - expected_stats[entity_id].append( - { - "start": process_timestamp(start).timestamp(), - "end": process_timestamp(end).timestamp(), - "mean": pytest.approx(expected_average), - "min": pytest.approx(expected_minimum), - "max": pytest.approx(expected_maximum), - "last_reset": None, - "state": expected_state, - "sum": expected_sum, - } - ) - start = (start + timedelta(days=31)).replace(day=1) - end = (end 
+ timedelta(days=31)).replace(day=1) - assert stats == expected_stats + assert start + verify_stats("month", start, lambda v: (v + timedelta(days=31)).replace(day=1)) assert "Error while processing event StatisticsTask" not in caplog.text @@ -4428,11 +4988,11 @@ async def test_validate_unit_change_convertible( "statistic_id": "sensor.test", "supported_unit": supported_unit, }, - "type": "units_changed", + "type": UNITS_CHANGED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) # Unavailable state - empty response hass.states.async_set( @@ -4653,11 +5213,11 @@ async def test_validate_statistics_unit_change_no_device_class( "statistic_id": "sensor.test", "supported_unit": supported_unit, }, - "type": "units_changed", + "type": UNITS_CHANGED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) # Unavailable state - empty response hass.states.async_set( @@ -4769,11 +5329,11 @@ async def test_validate_statistics_state_class_removed( "sensor.test": [ { "data": {"statistic_id": "sensor.test"}, - "type": "state_class_removed", + "type": STATE_CLASS_REMOVED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"state_class_removed"}) + await assert_validation_result(hass, client, expected, {STATE_CLASS_REMOVED_ISSUE}) # Unavailable state - empty response hass.states.async_set( @@ -4837,11 +5397,11 @@ async def test_validate_statistics_state_class_removed_issue_cleaned_up( "sensor.test": [ { "data": {"statistic_id": "sensor.test"}, - "type": "state_class_removed", + "type": STATE_CLASS_REMOVED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"state_class_removed"}) + await assert_validation_result(hass, client, expected, {STATE_CLASS_REMOVED_ISSUE}) # Remove the statistics - empty response get_instance(hass).async_clear_statistics(["sensor.test"]) @@ -5086,11 +5646,11 @@ async def test_validate_statistics_unit_change_no_conversion( "statistic_id": "sensor.test", "supported_unit": unit1, }, - "type": "units_changed", + "type": UNITS_CHANGED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) # Unavailable state - empty response hass.states.async_set( @@ -5267,11 +5827,11 @@ async def test_validate_statistics_unit_change_equivalent_units_2( "statistic_id": "sensor.test", "supported_unit": supported_unit, }, - "type": "units_changed", + "type": UNITS_CHANGED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) # Run statistics one hour later, metadata will not be updated await async_recorder_block_till_done(hass) @@ -5280,7 +5840,7 @@ async def test_validate_statistics_unit_change_equivalent_units_2( await assert_statistic_ids( hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) async def test_validate_statistics_other_domain( @@ -5369,7 +5929,7 @@ async def test_update_statistics_issues( now = await one_hour_stats(now) expected = { "state_class_removed_sensor.test": { - "issue_type": "state_class_removed", + "issue_type": 
STATE_CLASS_REMOVED_ISSUE, "statistic_id": "sensor.test", } } @@ -5573,8 +6133,9 @@ async def test_clean_up_repairs( create_issue("test", "test_issue", None) create_issue(DOMAIN, "test_issue_1", None) create_issue(DOMAIN, "test_issue_2", {"issue_type": "another_issue"}) - create_issue(DOMAIN, "test_issue_3", {"issue_type": "state_class_removed"}) - create_issue(DOMAIN, "test_issue_4", {"issue_type": "units_changed"}) + create_issue(DOMAIN, "test_issue_3", {"issue_type": STATE_CLASS_REMOVED_ISSUE}) + create_issue(DOMAIN, "test_issue_4", {"issue_type": UNITS_CHANGED_ISSUE}) + create_issue(DOMAIN, "test_issue_5", {"issue_type": MEAN_TYPE_CHANGED_ISSUE}) # Check the issues assert set(issue_registry.issues) == { @@ -5583,6 +6144,7 @@ ("sensor", "test_issue_2"), ("sensor", "test_issue_3"), ("sensor", "test_issue_4"), + ("sensor", "test_issue_5"), } # Request update of issues @@ -5596,3 +6158,140 @@ ("sensor", "test_issue_1"), ("sensor", "test_issue_2"), } + + +async def test_validate_statistics_mean_type_changed( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test validate_statistics. + + This tests that a validation issue is created when the mean type is changed. + """ + now = get_start_time(dt_util.utcnow()) + + await async_setup_component(hass, "sensor", {}) + await async_recorder_block_till_done(hass) + client = await hass_ws_client() + + # No statistics, no state - empty response + await assert_validation_result(hass, client, {}, {}) + + # No statistics, original state class - empty response + hass.states.async_set( + "sensor.wind_direction", + 10, + attributes=WIND_DIRECTION_ATTRIBUTES, + timestamp=now.timestamp(), + ) + await assert_validation_result(hass, client, {}, {}) + + # Run statistics + await async_recorder_block_till_done(hass) + do_adhoc_statistics(hass, start=now) + await async_recorder_block_till_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.wind_direction", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + + expected_log_entry = ( + "homeassistant.components.sensor.recorder", + logging.WARNING, + ( + "The statistics mean algorithm for sensor.wind_direction have changed from" + " CIRCULAR to ARITHMETIC. 
Generation of long term statistics will be " + "suppressed unless it changes back or go to " + "https://my.home-assistant.io/redirect/developer_statistics " + "to delete the old statistics" + ), + ) + # Valid stats, no log entry + assert expected_log_entry not in caplog.record_tuples + + # State class changed + hass.states.async_set( + "sensor.wind_direction", + 5, + attributes={ + **WIND_DIRECTION_ATTRIBUTES, + "state_class": SensorStateClass.MEASUREMENT, + }, + timestamp=now.timestamp(), + ) + expected = { + "sensor.wind_direction": [ + { + "data": { + "statistic_id": "sensor.wind_direction", + "metadata_mean_type": StatisticMeanType.CIRCULAR, + "state_mean_type": StatisticMeanType.ARITHMETIC, + }, + "type": MEAN_TYPE_CHANGED_ISSUE, + } + ], + } + await assert_validation_result(hass, client, expected, {MEAN_TYPE_CHANGED_ISSUE}) + + # Run statistics one hour later, metadata will not be updated + await async_recorder_block_till_done(hass) + do_adhoc_statistics(hass, start=now + timedelta(hours=1)) + await async_recorder_block_till_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.wind_direction", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + await assert_validation_result(hass, client, expected, {MEAN_TYPE_CHANGED_ISSUE}) + assert expected_log_entry in caplog.record_tuples + + # State class changed back + hass.states.async_set( + "sensor.wind_direction", + 350, + attributes=WIND_DIRECTION_ATTRIBUTES, + timestamp=now.timestamp(), + ) + await assert_validation_result(hass, client, {}, {}) + + # Run statistics + await async_recorder_block_till_done(hass) + do_adhoc_statistics(hass, start=now) + await async_recorder_block_till_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.wind_direction", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + + # Issue should be resolved + await assert_validation_result(hass, client, {}, {}) diff --git a/tests/components/shelly/__init__.py b/tests/components/shelly/__init__.py index ddece280d8a..ec2d3d2c829 100644 --- a/tests/components/shelly/__init__.py +++ b/tests/components/shelly/__init__.py @@ -143,20 +143,6 @@ def get_entity( ) -def get_entity_state(hass: HomeAssistant, entity_id: str) -> str: - """Return entity state.""" - entity = hass.states.get(entity_id) - assert entity - return entity.state - - -def get_entity_attribute(hass: HomeAssistant, entity_id: str, attribute: str) -> str: - """Return entity attribute.""" - entity = hass.states.get(entity_id) - assert entity - return entity.attributes[attribute] - - def register_device( device_registry: DeviceRegistry, config_entry: ConfigEntry ) -> DeviceEntry: diff --git a/tests/components/shelly/test_binary_sensor.py b/tests/components/shelly/test_binary_sensor.py index 1e7c54320e8..ea3a7d5f3d2 100644 --- a/tests/components/shelly/test_binary_sensor.py +++ b/tests/components/shelly/test_binary_sensor.py @@ -39,15 +39,16 @@ async def test_block_binary_sensor( entity_id = f"{BINARY_SENSOR_DOMAIN}.test_name_channel_1_overpowering" await init_integration(hass, 1) - assert 
hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setattr(mock_block_device.blocks[RELAY_BLOCK_ID], "overpower", 1) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_0-overpower" @@ -61,19 +62,18 @@ async def test_block_binary_sensor_extra_state_attr( entity_id = f"{BINARY_SENSOR_DOMAIN}.test_name_gas" await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes.get("detected") == "mild" monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "gas", "none") mock_block_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes.get("detected") == "none" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sensor_0-gas" @@ -89,15 +89,16 @@ async def test_block_rest_binary_sensor( monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setitem(mock_block_device.status["cloud"], "connected", True) await mock_rest_update(hass, freezer) - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cloud" @@ -115,20 +116,22 @@ async def test_block_rest_binary_sensor_connected_battery_devices( monkeypatch.setitem(mock_block_device.settings["coiot"], "update_period", 3600) await init_integration(hass, 1, model=MODEL_MOTION) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setitem(mock_block_device.status["cloud"], "connected", True) # Verify no update on fast intervals await mock_rest_update(hass, freezer) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF # Verify update on slow intervals await mock_rest_update(hass, freezer, seconds=UPDATE_PERIOD_MULTIPLIER * 3600) - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cloud" @@ -149,15 +152,16 @@ async def test_block_sleeping_binary_sensor( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "motion", 1) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON + 
assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sensor_0-motion" @@ -183,14 +187,16 @@ async def test_block_restored_sleeping_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF async def test_block_restored_sleeping_binary_sensor_no_last_state( @@ -214,14 +220,16 @@ async def test_block_restored_sleeping_binary_sensor_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF async def test_rpc_binary_sensor( @@ -234,17 +242,18 @@ async def test_rpc_binary_sensor( entity_id = f"{BINARY_SENSOR_DOMAIN}.test_cover_0_overpowering" await init_integration(hass, 2) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF mutate_rpc_device_status( monkeypatch, mock_rpc_device, "cover:0", "errors", "overpower" ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cover:0-overpower" @@ -290,20 +299,22 @@ async def test_rpc_sleeping_binary_sensor( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cloud", "connected", True) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON - - # test external power sensor - state = hass.states.get("binary_sensor.test_name_external_power") - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get("binary_sensor.test_name_external_power") - assert entry + # test external power sensor + assert (state := hass.states.get("binary_sensor.test_name_external_power")) + assert state.state == STATE_ON + + assert ( + entry := entity_registry.async_get("binary_sensor.test_name_external_power") + ) assert entry.unique_id == "123456789ABC-devicepower:0-external_power" @@ -331,14 +342,16 @@ async def test_rpc_restored_sleeping_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert 
hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Make device online monkeypatch.setattr(mock_rpc_device, "initialized", True) mock_rpc_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF async def test_rpc_restored_sleeping_binary_sensor_no_last_state( @@ -364,7 +377,8 @@ async def test_rpc_restored_sleeping_binary_sensor_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_rpc_device, "initialized", True) @@ -375,7 +389,8 @@ async def test_rpc_restored_sleeping_binary_sensor_no_last_state( mock_rpc_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF @pytest.mark.parametrize( @@ -407,17 +422,17 @@ async def test_rpc_device_virtual_binary_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-boolean:203-boolean" monkeypatch.setitem(mock_rpc_device.status["boolean:203"], "value", False) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_OFF + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF async def test_rpc_remove_virtual_binary_sensor_when_mode_toggle( @@ -450,8 +465,7 @@ async def test_rpc_remove_virtual_binary_sensor_when_mode_toggle( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_binary_sensor_when_orphaned( @@ -475,8 +489,7 @@ async def test_rpc_remove_virtual_binary_sensor_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_blu_trv_binary_sensor_entity( diff --git a/tests/components/shelly/test_button.py b/tests/components/shelly/test_button.py index 2a9720ca7ae..2057076d18b 100644 --- a/tests/components/shelly/test_button.py +++ b/tests/components/shelly/test_button.py @@ -27,10 +27,10 @@ async def test_block_button( entity_id = "button.test_name_reboot" # reboot button - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC_reboot" await hass.services.async_call( @@ -54,10 +54,10 @@ async def test_rpc_button( entity_id = "button.test_name_reboot" # reboot button - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state == snapshot(name=f"{entity_id}-state") - entry = 
entity_registry.async_get(entity_id) + assert (entry := entity_registry.async_get(entity_id)) assert entry == snapshot(name=f"{entity_id}-entry") await hass.services.async_call( @@ -74,11 +74,11 @@ async def test_rpc_button( [ ( DeviceConnectionError, - "Device communication error occurred while calling the entity button.test_name_reboot action for Test name device", + "Device communication error occurred while calling action for button.test_name_reboot of Test name", ), ( RpcCallError(999), - "RPC call error occurred while calling the entity button.test_name_reboot action for Test name device", + "RPC call error occurred while calling action for button.test_name_reboot of Test name", ), ], ) @@ -212,11 +212,11 @@ async def test_rpc_blu_trv_button( [ ( DeviceConnectionError, - "Device communication error occurred while calling the entity button.trv_name_calibrate action for Test name device", + "Device communication error occurred while calling action for button.trv_name_calibrate of Test name", ), ( RpcCallError(999), - "RPC call error occurred while calling the entity button.trv_name_calibrate action for Test name device", + "RPC call error occurred while calling action for button.trv_name_calibrate of Test name", ), ], ) diff --git a/tests/components/shelly/test_climate.py b/tests/components/shelly/test_climate.py index ac9c7967540..b2135fb38af 100644 --- a/tests/components/shelly/test_climate.py +++ b/tests/components/shelly/test_climate.py @@ -44,13 +44,7 @@ from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM -from . import ( - MOCK_MAC, - get_entity_attribute, - init_integration, - register_device, - register_entity, -) +from . 
import MOCK_MAC, init_integration, register_device, register_entity from .conftest import MOCK_STATUS_COAP from tests.common import mock_restore_cache, mock_restore_cache_with_extra_data @@ -86,11 +80,9 @@ async def test_climate_hvac_mode( await hass.async_block_till_done(wait_background_tasks=True) # Test initial hvac mode - off - state = hass.states.get(ENTITY_ID) - assert state == snapshot(name=f"{ENTITY_ID}-state") + assert hass.states.get(ENTITY_ID) == snapshot(name=f"{ENTITY_ID}-state") - entry = entity_registry.async_get(ENTITY_ID) - assert entry == snapshot(name=f"{ENTITY_ID}-entry") + assert entity_registry.async_get(ENTITY_ID) == snapshot(name=f"{ENTITY_ID}-entry") # Test set hvac mode heat await hass.services.async_call( @@ -105,7 +97,8 @@ async def test_climate_hvac_mode( monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "targetTemp", 20.0) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + + assert (state := hass.states.get(ENTITY_ID)) assert state.state == HVACMode.HEAT # Test set hvac mode off @@ -122,13 +115,13 @@ async def test_climate_hvac_mode( monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "targetTemp", 4.0) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.state == HVACMode.OFF # Test unavailable on error monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 1) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.state == STATE_UNAVAILABLE @@ -145,7 +138,7 @@ async def test_climate_set_temperature( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.state == HVACMode.OFF assert state.attributes[ATTR_TEMPERATURE] == 4 @@ -199,7 +192,7 @@ async def test_climate_set_preset_mode( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE # Test set Profile2 @@ -217,7 +210,7 @@ async def test_climate_set_preset_mode( monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "mode", 2) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.attributes[ATTR_PRESET_MODE] == "Profile2" # Set preset to none @@ -236,7 +229,7 @@ async def test_climate_set_preset_mode( monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "mode", 0) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE @@ -271,23 +264,26 @@ async def test_block_restored_climate( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 4.0 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 4.0 # Partial update, should not change state mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 4.0 + assert (state 
:= hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 4.0 # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 4.0 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 4.0 # Test set hvac mode heat, target temp should be set to last target temp (22) await hass.services.async_call( @@ -302,9 +298,10 @@ async def test_block_restored_climate( monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "targetTemp", 22.0) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.HEAT - assert hass.states.get(entity_id).attributes.get("temperature") == 22.0 + assert state.attributes.get(ATTR_TEMPERATURE) == 22.0 async def test_block_restored_climate_us_customary( @@ -339,17 +336,19 @@ async def test_block_restored_climate_us_customary( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 39 - assert hass.states.get(entity_id).attributes.get("current_temperature") == 67 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 39 + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 67 # Partial update, should not change state mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 39 - assert hass.states.get(entity_id).attributes.get("current_temperature") == 67 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 39 + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 67 # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) @@ -358,9 +357,10 @@ async def test_block_restored_climate_us_customary( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 39 - assert hass.states.get(entity_id).attributes.get("current_temperature") == 65 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 39 + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 65 # Test set hvac mode heat, target temp should be set to last target temp (10.0/50) await hass.services.async_call( @@ -375,9 +375,10 @@ async def test_block_restored_climate_us_customary( monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "targetTemp", 10.0) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.HEAT - assert hass.states.get(entity_id).attributes.get("temperature") == 50 + assert state.attributes.get(ATTR_TEMPERATURE) == 50 async def test_block_restored_climate_unavailable( @@ -405,7 +406,8 @@ async def 
test_block_restored_climate_unavailable( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF async def test_block_restored_climate_set_preset_before_online( @@ -433,7 +435,8 @@ async def test_block_restored_climate_set_preset_before_online( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.HEAT + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.HEAT with pytest.raises(ServiceValidationError): await hass.services.async_call( @@ -462,7 +465,10 @@ async def test_block_set_mode_connection_error( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match="Device communication error occurred while calling action for climate.test_name of Test name", + ): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, @@ -612,16 +618,14 @@ async def test_rpc_climate_hvac_mode( await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) - assert state == snapshot(name=f"{entity_id}-state") + assert (state := hass.states.get(entity_id)) == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry == snapshot(name=f"{entity_id}-entry") + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") monkeypatch.setitem(mock_rpc_device.status["thermostat:0"], "output", False) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE assert state.attributes[ATTR_CURRENT_HUMIDITY] == 44.4 @@ -637,7 +641,7 @@ async def test_rpc_climate_hvac_mode( mock_rpc_device.call_rpc.assert_called_once_with( "Thermostat.SetConfig", {"config": {"id": 0, "enable": False}} ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.OFF @@ -655,15 +659,14 @@ async def test_rpc_climate_without_humidity( await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.HEAT assert state.attributes[ATTR_TEMPERATURE] == 23 assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 12.3 assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING assert ATTR_CURRENT_HUMIDITY not in state.attributes - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-thermostat:0" @@ -675,7 +678,7 @@ async def test_rpc_climate_set_temperature( await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_TEMPERATURE] == 23 monkeypatch.setitem(mock_rpc_device.status["thermostat:0"], "target_C", 28) @@ -690,7 +693,7 @@ async def test_rpc_climate_set_temperature( mock_rpc_device.call_rpc.assert_called_once_with( "Thermostat.SetConfig", {"config": {"id": 0, "target_C": 28}} ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_TEMPERATURE] == 28 @@ -705,7 +708,7 @@ async def 
test_rpc_climate_hvac_mode_cool( await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.COOL assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING @@ -754,19 +757,16 @@ async def test_wall_display_thermostat_mode_external_actuator( await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) # the switch entity should be created - state = hass.states.get(switch_entity_id) - assert state + assert (state := hass.states.get(switch_entity_id)) assert state.state == STATE_ON assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 # the climate entity should be created - state = hass.states.get(climate_entity_id) - assert state + assert (state := hass.states.get(climate_entity_id)) assert state.state == HVACMode.HEAT assert len(hass.states.async_entity_ids(CLIMATE_DOMAIN)) == 1 - entry = entity_registry.async_get(climate_entity_id) - assert entry + assert (entry := entity_registry.async_get(climate_entity_id)) assert entry.unique_id == "123456789ABC-thermostat:0" @@ -784,13 +784,9 @@ async def test_blu_trv_climate_set_temperature( await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) - state = hass.states.get(entity_id) - assert state == snapshot(name=f"{entity_id}-state") + assert (state := hass.states.get(entity_id)) == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry == snapshot(name=f"{entity_id}-entry") - - assert get_entity_attribute(hass, entity_id, ATTR_TEMPERATURE) == 17.1 + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") monkeypatch.setitem( mock_blu_trv.status[f"{BLU_TRV_IDENTIFIER}:200"], "target_C", 28 @@ -813,7 +809,8 @@ async def test_blu_trv_climate_set_temperature( BLU_TRV_TIMEOUT, ) - assert get_entity_attribute(hass, entity_id, ATTR_TEMPERATURE) == 28 + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_TEMPERATURE] == 28 async def test_blu_trv_climate_disabled( @@ -828,14 +825,16 @@ async def test_blu_trv_climate_disabled( await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) - assert get_entity_attribute(hass, entity_id, ATTR_TEMPERATURE) == 17.1 + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_TEMPERATURE] == 17.1 monkeypatch.setitem( mock_blu_trv.config[f"{BLU_TRV_IDENTIFIER}:200"], "enable", False ) mock_blu_trv.mock_update() - assert get_entity_attribute(hass, entity_id, ATTR_TEMPERATURE) is None + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_TEMPERATURE] is None async def test_blu_trv_climate_hvac_action( @@ -850,9 +849,11 @@ async def test_blu_trv_climate_hvac_action( await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) - assert get_entity_attribute(hass, entity_id, ATTR_HVAC_ACTION) == HVACAction.IDLE + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE monkeypatch.setitem(mock_blu_trv.status[f"{BLU_TRV_IDENTIFIER}:200"], "pos", 10) mock_blu_trv.mock_update() - assert get_entity_attribute(hass, entity_id, ATTR_HVAC_ACTION) == HVACAction.HEATING + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index 0b2d355cfd8..fffffc21cae 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -24,6 
+24,7 @@ from homeassistant.components.shelly.const import ( BLEScannerMode, ) from homeassistant.components.shelly.coordinator import ENTRY_RELOAD_COOLDOWN +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( CONF_HOST, CONF_MODEL, @@ -744,6 +745,38 @@ async def test_zeroconf_sleeping_device_error(hass: HomeAssistant) -> None: assert result["reason"] == "cannot_connect" +async def test_options_flow_abort_setup_retry( + hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch +) -> None: + """Test ble options abort if device is in setup retry.""" + monkeypatch.setattr( + mock_rpc_device, "initialize", AsyncMock(side_effect=DeviceConnectionError) + ) + entry = await init_integration(hass, 2) + + assert entry.state is ConfigEntryState.SETUP_RETRY + + result = await hass.config_entries.options.async_init(entry.entry_id) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +async def test_options_flow_abort_no_scripts_support( + hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch +) -> None: + """Test ble options abort if device does not support scripts.""" + monkeypatch.setattr( + mock_rpc_device, "supports_scripts", AsyncMock(return_value=False) + ) + entry = await init_integration(hass, 2) + + result = await hass.config_entries.options.async_init(entry.entry_id) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_scripts_support" + + async def test_zeroconf_already_configured(hass: HomeAssistant) -> None: """Test we get the form.""" @@ -1080,7 +1113,7 @@ async def test_options_flow_ble(hass: HomeAssistant, mock_rpc_device: Mock) -> N await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_BLE_SCANNER_MODE] == BLEScannerMode.DISABLED + assert result["data"][CONF_BLE_SCANNER_MODE] is BLEScannerMode.DISABLED result = await hass.config_entries.options.async_init(entry.entry_id) assert result["type"] is FlowResultType.FORM @@ -1096,7 +1129,7 @@ async def test_options_flow_ble(hass: HomeAssistant, mock_rpc_device: Mock) -> N await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_BLE_SCANNER_MODE] == BLEScannerMode.ACTIVE + assert result["data"][CONF_BLE_SCANNER_MODE] is BLEScannerMode.ACTIVE result = await hass.config_entries.options.async_init(entry.entry_id) assert result["type"] is FlowResultType.FORM @@ -1112,7 +1145,7 @@ async def test_options_flow_ble(hass: HomeAssistant, mock_rpc_device: Mock) -> N await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_BLE_SCANNER_MODE] == BLEScannerMode.PASSIVE + assert result["data"][CONF_BLE_SCANNER_MODE] is BLEScannerMode.PASSIVE await hass.config_entries.async_unload(entry.entry_id) diff --git a/tests/components/shelly/test_coordinator.py b/tests/components/shelly/test_coordinator.py index 55a1d8958cd..f89bec8853a 100644 --- a/tests/components/shelly/test_coordinator.py +++ b/tests/components/shelly/test_coordinator.py @@ -32,7 +32,6 @@ from homeassistant.helpers import device_registry as dr, issue_registry as ir from . 
import ( MOCK_MAC, - get_entity_state, init_integration, inject_rpc_device_event, mock_polling_rpc_update, @@ -72,7 +71,7 @@ async def test_block_reload_on_cfg_change( async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Generate config change from switch to light monkeypatch.setitem( @@ -82,7 +81,7 @@ async def test_block_reload_on_cfg_change( mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Wait for debouncer freezer.tick(timedelta(seconds=ENTRY_RELOAD_COOLDOWN)) @@ -114,14 +113,14 @@ async def test_block_no_reload_on_bulb_changes( mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Wait for debouncer freezer.tick(timedelta(seconds=ENTRY_RELOAD_COOLDOWN)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Test no reload on effect change monkeypatch.setattr(mock_block_device.blocks[LIGHT_BLOCK_ID], "effect", 1) @@ -129,14 +128,14 @@ async def test_block_no_reload_on_bulb_changes( mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Wait for debouncer freezer.tick(timedelta(seconds=ENTRY_RELOAD_COOLDOWN)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") async def test_block_polling_auth_error( @@ -245,14 +244,16 @@ async def test_block_polling_connection_error( ) await init_integration(hass, 1) - assert get_entity_state(hass, "switch.test_name_channel_1") == STATE_ON + assert (state := hass.states.get("switch.test_name_channel_1")) + assert state.state == STATE_ON # Move time to generate polling freezer.tick(timedelta(seconds=UPDATE_PERIOD_MULTIPLIER * 15)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert get_entity_state(hass, "switch.test_name_channel_1") == STATE_UNAVAILABLE + assert (state := hass.states.get("switch.test_name_channel_1")) + assert state.state == STATE_UNAVAILABLE @pytest.mark.parametrize("exc", [DeviceConnectionError, MacAddressMismatchError]) @@ -270,12 +271,14 @@ async def test_block_rest_update_connection_error( await init_integration(hass, 1) await mock_rest_update(hass, freezer) - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON monkeypatch.setattr(mock_block_device, "update_shelly", AsyncMock(side_effect=exc)) await mock_rest_update(hass, freezer) - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_block_sleeping_device_no_periodic_updates( @@ -297,14 +300,16 @@ async def test_block_sleeping_device_no_periodic_updates( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == "22.1" + assert (state := hass.states.get(entity_id)) + assert 
state.state == "22.1" # Move time to generate polling freezer.tick(timedelta(seconds=UPDATE_PERIOD_MULTIPLIER * 3600)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_block_device_push_updates_failure( @@ -416,7 +421,7 @@ async def test_rpc_reload_on_cfg_change( ) await hass.async_block_till_done() - assert hass.states.get("switch.test_switch_0") is not None + assert hass.states.get("switch.test_switch_0") # Wait for debouncer freezer.tick(timedelta(seconds=ENTRY_RELOAD_COOLDOWN)) @@ -596,14 +601,16 @@ async def test_rpc_sleeping_device_no_periodic_updates( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == "22.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.9" # Move time to generate polling freezer.tick(timedelta(seconds=UPDATE_PERIOD_MULTIPLIER * 1000)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) is STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_sleeping_device_firmware_unsupported( @@ -716,7 +723,8 @@ async def test_rpc_reconnect_error( monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) await init_integration(hass, 2) - assert get_entity_state(hass, "switch.test_switch_0") == STATE_ON + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_ON monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setattr(mock_rpc_device, "initialize", AsyncMock(side_effect=exc)) @@ -726,7 +734,8 @@ async def test_rpc_reconnect_error( async_fire_time_changed(hass) await hass.async_block_till_done() - assert get_entity_state(hass, "switch.test_switch_0") == STATE_UNAVAILABLE + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_UNAVAILABLE async def test_rpc_error_running_connected_events( @@ -748,14 +757,17 @@ async def test_rpc_error_running_connected_events( ) assert "Error running connected events for device" in caplog.text - assert get_entity_state(hass, "switch.test_switch_0") == STATE_UNAVAILABLE + + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_UNAVAILABLE # Move time to generate reconnect without error freezer.tick(timedelta(seconds=RPC_RECONNECT_INTERVAL)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, "switch.test_switch_0") == STATE_ON + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_ON async def test_rpc_polling_connection_error( @@ -776,11 +788,13 @@ async def test_rpc_polling_connection_error( ), ) - assert get_entity_state(hass, entity_id) == "-63" + assert (state := hass.states.get(entity_id)) + assert state.state == "-63" await mock_polling_rpc_update(hass, freezer) - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_polling_disconnected( @@ -795,11 +809,13 @@ async def test_rpc_polling_disconnected( monkeypatch.setattr(mock_rpc_device, "connected", False) - assert get_entity_state(hass, entity_id) == "-63" + assert (state := 
hass.states.get(entity_id)) + assert state.state == "-63" await mock_polling_rpc_update(hass, freezer) - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_update_entry_fw_ver( @@ -837,12 +853,17 @@ async def test_rpc_update_entry_fw_ver( assert device.sw_version == "99.0.0" +@pytest.mark.parametrize(("supports_scripts"), [True, False]) async def test_rpc_runs_connected_events_when_initialized( hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, + supports_scripts: bool, ) -> None: """Test RPC runs connected events when initialized.""" + monkeypatch.setattr( + mock_rpc_device, "supports_scripts", AsyncMock(return_value=supports_scripts) + ) monkeypatch.setattr(mock_rpc_device, "initialized", False) await init_integration(hass, 2) @@ -853,8 +874,9 @@ async def test_rpc_runs_connected_events_when_initialized( mock_rpc_device.mock_initialized() await hass.async_block_till_done() - # BLE script list is called during connected events - assert call.script_list() in mock_rpc_device.mock_calls + assert call.supports_scripts() in mock_rpc_device.mock_calls + # BLE script list is called during connected events if device supports scripts + assert bool(call.script_list() in mock_rpc_device.mock_calls) == supports_scripts async def test_rpc_sleeping_device_unload_ignore_ble_scanner( @@ -903,7 +925,8 @@ async def test_block_sleeping_device_connection_error( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Make device online event with connection error monkeypatch.setattr( @@ -917,7 +940,8 @@ async def test_block_sleeping_device_connection_error( await hass.async_block_till_done(wait_background_tasks=True) assert "Error connecting to Shelly device" in caplog.text - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Move time to generate sleep period update freezer.tick(timedelta(seconds=sleep_period * UPDATE_PERIOD_MULTIPLIER)) @@ -925,7 +949,8 @@ async def test_block_sleeping_device_connection_error( await hass.async_block_till_done(wait_background_tasks=True) assert "Sleeping device did not update" in caplog.text - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_sleeping_device_connection_error( @@ -954,7 +979,8 @@ async def test_rpc_sleeping_device_connection_error( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Make device online event with connection error monkeypatch.setattr( @@ -968,7 +994,8 @@ async def test_rpc_sleeping_device_connection_error( await hass.async_block_till_done(wait_background_tasks=True) assert "Error connecting to Shelly device" in caplog.text - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Move time to generate sleep period update freezer.tick(timedelta(seconds=sleep_period * UPDATE_PERIOD_MULTIPLIER)) @@ -976,7 +1003,8 @@ async def test_rpc_sleeping_device_connection_error( await 
hass.async_block_till_done(wait_background_tasks=True) assert "Sleeping device did not update" in caplog.text - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_sleeping_device_late_setup( @@ -1001,7 +1029,8 @@ async def test_rpc_sleeping_device_late_setup( monkeypatch.setattr(mock_rpc_device, "connected", True) mock_rpc_device.mock_initialized() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.test_name_temperature") is not None + + assert hass.states.get("sensor.test_name_temperature") async def test_rpc_already_connected( diff --git a/tests/components/shelly/test_cover.py b/tests/components/shelly/test_cover.py index 40a364fd435..df3ab4f288d 100644 --- a/tests/components/shelly/test_cover.py +++ b/tests/components/shelly/test_cover.py @@ -47,7 +47,7 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 50}, blocking=True, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_POSITION] == 50 await hass.services.async_call( @@ -56,7 +56,8 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == CoverState.OPENING + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.OPENING await hass.services.async_call( COVER_DOMAIN, @@ -64,7 +65,8 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == CoverState.CLOSING + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSING await hass.services.async_call( COVER_DOMAIN, @@ -72,10 +74,10 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSED - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-roller_0" @@ -86,11 +88,15 @@ async def test_block_device_update( monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "rollerPos", 0) await init_integration(hass, 1) - assert hass.states.get("cover.test_name").state == CoverState.CLOSED + state = hass.states.get("cover.test_name") + assert state + assert state.state == CoverState.CLOSED monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "rollerPos", 100) mock_block_device.mock_update() - assert hass.states.get("cover.test_name").state == CoverState.OPEN + state = hass.states.get("cover.test_name") + assert state + assert state.state == CoverState.OPEN async def test_block_device_no_roller_blocks( @@ -99,6 +105,7 @@ async def test_block_device_no_roller_blocks( """Test block device without roller blocks.""" monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "type", None) await init_integration(hass, 1) + assert hass.states.get("cover.test_name") is None @@ -118,7 +125,7 @@ async def test_rpc_device_services( {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 50}, blocking=True, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_POSITION] == 50 mutate_rpc_device_status( @@ -131,7 +138,9 @@ async def test_rpc_device_services( blocking=True, ) 
mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == CoverState.OPENING + + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.OPENING mutate_rpc_device_status( monkeypatch, mock_rpc_device, "cover:0", "state", "closing" @@ -143,7 +152,9 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == CoverState.CLOSING + + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSING mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "closed") await hass.services.async_call( @@ -153,10 +164,10 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSED - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cover:0" @@ -166,6 +177,7 @@ async def test_rpc_device_no_cover_keys( """Test RPC device without cover keys.""" monkeypatch.delitem(mock_rpc_device.status, "cover:0") await init_integration(hass, 2) + assert hass.states.get("cover.test_cover_0") is None @@ -175,11 +187,16 @@ async def test_rpc_device_update( """Test RPC device update.""" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "closed") await init_integration(hass, 2) - assert hass.states.get("cover.test_cover_0").state == CoverState.CLOSED + + state = hass.states.get("cover.test_cover_0") + assert state + assert state.state == CoverState.CLOSED mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "open") mock_rpc_device.mock_update() - assert hass.states.get("cover.test_cover_0").state == CoverState.OPEN + state = hass.states.get("cover.test_cover_0") + assert state + assert state.state == CoverState.OPEN async def test_rpc_device_no_position_control( @@ -190,7 +207,10 @@ async def test_rpc_device_no_position_control( monkeypatch, mock_rpc_device, "cover:0", "pos_control", False ) await init_integration(hass, 2) - assert hass.states.get("cover.test_cover_0").state == CoverState.OPEN + + state = hass.states.get("cover.test_cover_0") + assert state + assert state.state == CoverState.OPEN async def test_rpc_cover_tilt( @@ -212,11 +232,10 @@ async def test_rpc_cover_tilt( await init_integration(hass, 3) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cover:0" await hass.services.async_call( @@ -228,7 +247,7 @@ async def test_rpc_cover_tilt( mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 50) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( @@ -240,7 +259,7 @@ async def test_rpc_cover_tilt( mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 100) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 await hass.services.async_call( @@ -258,5 +277,5 @@ async def 
test_rpc_cover_tilt( mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 10) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 10 diff --git a/tests/components/shelly/test_device_trigger.py b/tests/components/shelly/test_device_trigger.py index 89045208d20..ca9edb19fa7 100644 --- a/tests/components/shelly/test_device_trigger.py +++ b/tests/components/shelly/test_device_trigger.py @@ -168,7 +168,10 @@ async def test_get_triggers_for_invalid_device_id( connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) - with pytest.raises(InvalidDeviceAutomationConfig): + with pytest.raises( + InvalidDeviceAutomationConfig, + match="not found while configuring device automation triggers", + ): await async_get_device_automations( hass, DeviceAutomationType.TRIGGER, invalid_device.id ) @@ -384,7 +387,10 @@ async def test_validate_trigger_invalid_triggers( }, ) - assert "Invalid (type,subtype): ('single', 'button3')" in caplog.text + assert ( + "Invalid device automation trigger (type, subtype): ('single', 'button3')" + in caplog.text + ) async def test_rpc_no_runtime_data( diff --git a/tests/components/shelly/test_event.py b/tests/components/shelly/test_event.py index e184c154697..a5367408955 100644 --- a/tests/components/shelly/test_event.py +++ b/tests/components/shelly/test_event.py @@ -33,8 +33,7 @@ async def test_rpc_button( await init_integration(hass, 2) entity_id = "event.test_name_input_0" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_UNKNOWN assert state.attributes.get(ATTR_EVENT_TYPES) == unordered( ["btn_down", "btn_up", "double_push", "long_push", "single_push", "triple_push"] @@ -42,8 +41,7 @@ async def test_rpc_button( assert state.attributes.get(ATTR_EVENT_TYPE) is None assert state.attributes.get(ATTR_DEVICE_CLASS) == EventDeviceClass.BUTTON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:0" inject_rpc_device_event( @@ -62,7 +60,7 @@ async def test_rpc_button( ) await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes.get(ATTR_EVENT_TYPE) == "single_push" @@ -78,11 +76,9 @@ async def test_rpc_script_1_event( await init_integration(hass, 2) entity_id = "event.test_name_test_script_js" - state = hass.states.get(entity_id) - assert state == snapshot(name=f"{entity_id}-state") + assert hass.states.get(entity_id) == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry == snapshot(name=f"{entity_id}-entry") + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") inject_rpc_device_event( monkeypatch, @@ -101,7 +97,7 @@ async def test_rpc_script_1_event( ) await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes.get(ATTR_EVENT_TYPE) == "script_start" inject_rpc_device_event( @@ -121,7 +117,7 @@ async def test_rpc_script_1_event( ) await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes.get(ATTR_EVENT_TYPE) != "unknown_event" @@ -135,11 +131,9 @@ async def test_rpc_script_2_event( await init_integration(hass, 2) entity_id = 
"event.test_name_test_script_2_js" - state = hass.states.get(entity_id) - assert state == snapshot(name=f"{entity_id}-state") + assert hass.states.get(entity_id) == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry == snapshot(name=f"{entity_id}-entry") + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -152,11 +146,9 @@ async def test_rpc_script_ble_event( await init_integration(hass, 2) entity_id = f"event.test_name_{BLE_SCRIPT_NAME}" - state = hass.states.get(entity_id) - assert state == snapshot(name=f"{entity_id}-state") + assert hass.states.get(entity_id) == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry == snapshot(name=f"{entity_id}-entry") + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") async def test_rpc_event_removal( @@ -186,15 +178,13 @@ async def test_block_event( await init_integration(hass, 1) entity_id = "event.test_name_channel_1" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_UNKNOWN assert state.attributes.get(ATTR_EVENT_TYPES) == unordered(["single", "long"]) assert state.attributes.get(ATTR_EVENT_TYPE) is None assert state.attributes.get(ATTR_DEVICE_CLASS) == EventDeviceClass.BUTTON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_0-1" monkeypatch.setattr( @@ -206,7 +196,7 @@ async def test_block_event( mock_block_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes.get(ATTR_EVENT_TYPE) == "long" @@ -217,8 +207,7 @@ async def test_block_event_shix3_1( await init_integration(hass, 1, model=MODEL_I3) entity_id = "event.test_name_channel_1" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.attributes.get(ATTR_EVENT_TYPES) == unordered( ["double", "long", "long_single", "single", "single_long", "triple"] ) diff --git a/tests/components/shelly/test_init.py b/tests/components/shelly/test_init.py index 0cec6383461..129aa812580 100644 --- a/tests/components/shelly/test_init.py +++ b/tests/components/shelly/test_init.py @@ -307,7 +307,8 @@ async def test_sleeping_rpc_device_online_during_setup( assert "will resume when device is online" in caplog.text assert "is online (source: setup)" in caplog.text - assert hass.states.get("sensor.test_name_temperature") is not None + + assert hass.states.get("sensor.test_name_temperature") async def test_sleeping_rpc_device_offline_during_setup( @@ -336,7 +337,7 @@ async def test_sleeping_rpc_device_offline_during_setup( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.test_name_temperature") is not None + assert hass.states.get("sensor.test_name_temperature") @pytest.mark.parametrize( @@ -360,13 +361,15 @@ async def test_entry_unload( entry = await init_integration(hass, gen) assert entry.state is ConfigEntryState.LOADED - assert hass.states.get(entity_id).state is STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() assert entry.state is 
ConfigEntryState.NOT_LOADED - assert hass.states.get(entity_id).state is STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE @pytest.mark.parametrize( @@ -384,9 +387,9 @@ async def test_entry_unload_device_not_ready( mock_rpc_device: Mock, ) -> None: """Test entry unload when device is not ready.""" - entry = await init_integration(hass, gen, sleep_period=1000) - + assert (entry := await init_integration(hass, gen, sleep_period=1000)) assert entry.state is ConfigEntryState.LOADED + assert hass.states.get(entity_id) is None await hass.config_entries.async_unload(entry.entry_id) @@ -405,13 +408,15 @@ async def test_entry_unload_not_connected( with patch( "homeassistant.components.shelly.coordinator.async_stop_scanner" ) as mock_stop_scanner: - entry = await init_integration( - hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} + assert ( + entry := await init_integration( + hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} + ) ) - entity_id = "switch.test_switch_0" - assert entry.state is ConfigEntryState.LOADED - assert hass.states.get(entity_id).state is STATE_ON + + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_ON assert not mock_stop_scanner.call_count monkeypatch.setattr(mock_rpc_device, "connected", False) @@ -434,13 +439,15 @@ async def test_entry_unload_not_connected_but_we_think_we_are( "homeassistant.components.shelly.coordinator.async_stop_scanner", side_effect=DeviceConnectionError, ) as mock_stop_scanner: - entry = await init_integration( - hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} + assert ( + entry := await init_integration( + hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} + ) ) - entity_id = "switch.test_switch_0" - assert entry.state is ConfigEntryState.LOADED - assert hass.states.get(entity_id).state is STATE_ON + + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_ON assert not mock_stop_scanner.call_count monkeypatch.setattr(mock_rpc_device, "connected", False) @@ -473,7 +480,9 @@ async def test_entry_missing_gen(hass: HomeAssistant, mock_block_device: Mock) - entry = await init_integration(hass, None) assert entry.state is ConfigEntryState.LOADED - assert hass.states.get("switch.test_name_channel_1").state is STATE_ON + + assert (state := hass.states.get("switch.test_name_channel_1")) + assert state.state == STATE_ON async def test_entry_missing_port(hass: HomeAssistant) -> None: diff --git a/tests/components/shelly/test_light.py b/tests/components/shelly/test_light.py index 482821aa966..0dab06f53a9 100644 --- a/tests/components/shelly/test_light.py +++ b/tests/components/shelly/test_light.py @@ -65,18 +65,17 @@ async def test_block_device_rgbw_bulb( await init_integration(hass, 1, model=MODEL_BULB) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_RGBW_COLOR] == (45, 55, 65, 70) - assert attributes[ATTR_BRIGHTNESS] == 48 - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ + assert state.attributes[ATTR_RGBW_COLOR] == (45, 55, 65, 70) + assert state.attributes[ATTR_BRIGHTNESS] == 48 + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ ColorMode.COLOR_TEMP, ColorMode.RGBW, ] - assert attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.EFFECT - assert len(attributes[ATTR_EFFECT_LIST]) == 7 - assert attributes[ATTR_EFFECT] == 
"Off" + assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.EFFECT + assert len(state.attributes[ATTR_EFFECT_LIST]) == 7 + assert state.attributes[ATTR_EFFECT] == "Off" # Turn off mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -89,7 +88,7 @@ async def test_block_device_rgbw_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="off" ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on, RGBW = [70, 80, 90, 20], brightness = 33, effect = Flash @@ -108,13 +107,12 @@ async def test_block_device_rgbw_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", gain=13, brightness=13, red=70, green=80, blue=90, white=30, effect=3 ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.RGBW - assert attributes[ATTR_RGBW_COLOR] == (70, 80, 90, 30) - assert attributes[ATTR_BRIGHTNESS] == 33 - assert attributes[ATTR_EFFECT] == "Flash" + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.RGBW + assert state.attributes[ATTR_RGBW_COLOR] == (70, 80, 90, 30) + assert state.attributes[ATTR_BRIGHTNESS] == 33 + assert state.attributes[ATTR_EFFECT] == "Flash" # Turn on, COLOR_TEMP_KELVIN = 3500 mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -127,14 +125,12 @@ async def test_block_device_rgbw_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", temp=3500, mode="white" ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP - assert attributes[ATTR_COLOR_TEMP_KELVIN] == 3500 + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 3500 - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light_0" @@ -154,21 +150,20 @@ async def test_block_device_rgb_bulb( await init_integration(hass, 1, model=MODEL_BULB_RGBW) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_RGB_COLOR] == (45, 55, 65) - assert attributes[ATTR_BRIGHTNESS] == 48 - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ + assert state.attributes[ATTR_RGB_COLOR] == (45, 55, 65) + assert state.attributes[ATTR_BRIGHTNESS] == 48 + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ ColorMode.COLOR_TEMP, ColorMode.RGB, ] assert ( - attributes[ATTR_SUPPORTED_FEATURES] + state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.EFFECT | LightEntityFeature.TRANSITION ) - assert len(attributes[ATTR_EFFECT_LIST]) == 4 - assert attributes[ATTR_EFFECT] == "Off" + assert len(state.attributes[ATTR_EFFECT_LIST]) == 4 + assert state.attributes[ATTR_EFFECT] == "Off" # Turn off mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -181,7 +176,7 @@ async def test_block_device_rgb_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="off" ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on, RGB = [70, 80, 90], 
brightness = 33, effect = Flash @@ -200,13 +195,12 @@ async def test_block_device_rgb_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", gain=13, brightness=13, red=70, green=80, blue=90, effect=3 ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.RGB - assert attributes[ATTR_RGB_COLOR] == (70, 80, 90) - assert attributes[ATTR_BRIGHTNESS] == 33 - assert attributes[ATTR_EFFECT] == "Flash" + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.RGB + assert state.attributes[ATTR_RGB_COLOR] == (70, 80, 90) + assert state.attributes[ATTR_BRIGHTNESS] == 33 + assert state.attributes[ATTR_EFFECT] == "Flash" # Turn on, COLOR_TEMP_KELVIN = 3500 mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -219,11 +213,10 @@ async def test_block_device_rgb_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", temp=3500, mode="white" ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP - assert attributes[ATTR_COLOR_TEMP_KELVIN] == 3500 + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 3500 # Turn on with unsupported effect mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -236,14 +229,13 @@ async def test_block_device_rgb_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", mode="color" ) - state = hass.states.get(entity_id) - attributes = state.attributes + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_EFFECT] == "Off" + assert state.attributes[ATTR_EFFECT] == "Off" assert "Effect 'Breath' not supported" in caplog.text - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light_1" @@ -272,12 +264,11 @@ async def test_block_device_white_bulb( await init_integration(hass, 1, model=MODEL_VINTAGE_V2) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_BRIGHTNESS] == 128 - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.BRIGHTNESS] - assert attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION + assert state.attributes[ATTR_BRIGHTNESS] == 128 + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.BRIGHTNESS] + assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION # Turn off mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -290,7 +281,7 @@ async def test_block_device_white_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="off" ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on, brightness = 33 @@ -304,13 +295,11 @@ async def test_block_device_white_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", gain=13, brightness=13 ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON 
- assert attributes[ATTR_BRIGHTNESS] == 33 + assert state.attributes[ATTR_BRIGHTNESS] == 33 - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light_1" @@ -343,9 +332,8 @@ async def test_block_device_support_transition( await init_integration(hass, 1, model=model) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes - assert attributes[ATTR_SUPPORTED_FEATURES] & LightEntityFeature.TRANSITION + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_SUPPORTED_FEATURES] & LightEntityFeature.TRANSITION # Turn on, TRANSITION = 4 mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -358,7 +346,7 @@ async def test_block_device_support_transition( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", transition=4000 ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON # Turn off, TRANSITION = 6, limit to 5000ms @@ -372,11 +360,10 @@ async def test_block_device_support_transition( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="off", transition=5000 ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light_1" @@ -403,14 +390,14 @@ async def test_block_device_relay_app_type_light( mock_block_device.blocks[RELAY_BLOCK_ID], "description", "relay_1" ) await init_integration(hass, 1) + assert hass.states.get("switch.test_name_channel_1") is None # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.ONOFF] - assert attributes[ATTR_SUPPORTED_FEATURES] == 0 + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.ONOFF] + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 # Turn off mock_block_device.blocks[RELAY_BLOCK_ID].set_state.reset_mock() @@ -423,7 +410,7 @@ async def test_block_device_relay_app_type_light( mock_block_device.blocks[RELAY_BLOCK_ID].set_state.assert_called_once_with( turn="off" ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on @@ -437,11 +424,10 @@ async def test_block_device_relay_app_type_light( mock_block_device.blocks[RELAY_BLOCK_ID].set_state.assert_called_once_with( turn="on" ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_1" @@ -451,6 +437,7 @@ async def test_block_device_no_light_blocks( """Test block device without light blocks.""" monkeypatch.setattr(mock_block_device.blocks[LIGHT_BLOCK_ID], "type", "roller") await init_integration(hass, 1) + assert hass.states.get("light.test_name_channel_1") is None @@ -473,7 +460,9 @@ async def test_rpc_device_switch_type_lights_mode( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ON + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON 
mutate_rpc_device_status(monkeypatch, mock_rpc_device, "switch:0", "output", False) await hass.services.async_call( @@ -483,10 +472,11 @@ async def test_rpc_device_switch_type_lights_mode( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_OFF - entry = entity_registry.async_get(entity_id) - assert entry + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF + + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-switch:0" @@ -510,7 +500,8 @@ async def test_rpc_light( ) mock_rpc_device.call_rpc.assert_called_once_with("Light.Set", {"id": 0, "on": True}) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 135 @@ -528,7 +519,8 @@ async def test_rpc_light( mock_rpc_device.call_rpc.assert_called_once_with( "Light.Set", {"id": 0, "on": False} ) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on, brightness = 33 @@ -547,7 +539,8 @@ async def test_rpc_light( mock_rpc_device.call_rpc.assert_called_once_with( "Light.Set", {"id": 0, "on": True, "brightness": 13} ) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 33 @@ -565,7 +558,8 @@ async def test_rpc_light( mock_rpc_device.call_rpc.assert_called_once_with( "Light.Set", {"id": 0, "on": True, "transition_duration": 10.1} ) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON # Turn off, transition = 0.4, should be limited to 0.5 @@ -584,11 +578,10 @@ async def test_rpc_light( "Light.Set", {"id": 0, "on": False, "transition_duration": 0.5} ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light:0" @@ -606,12 +599,11 @@ async def test_rpc_device_rgb_profile( await init_integration(hass, 2) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_RGB_COLOR] == (45, 55, 65) - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGB] - assert attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION + assert state.attributes[ATTR_RGB_COLOR] == (45, 55, 65) + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGB] + assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION # Turn on, RGB = [70, 80, 90] await hass.services.async_call( @@ -628,14 +620,12 @@ async def test_rpc_device_rgb_profile( "RGB.Set", {"id": 0, "on": True, "rgb": [70, 80, 90]} ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.RGB - assert attributes[ATTR_RGB_COLOR] == (70, 80, 90) + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.RGB + assert state.attributes[ATTR_RGB_COLOR] == (70, 80, 90) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-rgb:0" @@ 
-653,12 +643,11 @@ async def test_rpc_device_rgbw_profile( await init_integration(hass, 2) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_RGBW_COLOR] == (21, 22, 23, 120) - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGBW] - assert attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION + assert state.attributes[ATTR_RGBW_COLOR] == (21, 22, 23, 120) + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGBW] + assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION # Turn on, RGBW = [72, 82, 92, 128] await hass.services.async_call( @@ -678,14 +667,12 @@ async def test_rpc_device_rgbw_profile( "RGBW.Set", {"id": 0, "on": True, "rgb": [72, 82, 92], "white": 128} ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.RGBW - assert attributes[ATTR_RGBW_COLOR] == (72, 82, 92, 128) + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.RGBW + assert state.attributes[ATTR_RGBW_COLOR] == (72, 82, 92, 128) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-rgbw:0" @@ -730,9 +717,11 @@ async def test_rpc_rgbw_device_light_mode_remove_others( # verify we have 4 lights for i in range(SHELLY_PLUS_RGBW_CHANNELS): entity_id = f"light.test_light_{i}" - assert hass.states.get(entity_id).state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON + + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-light:{i}" # verify RGB & RGBW entities removed @@ -793,9 +782,11 @@ async def test_rpc_rgbw_device_rgb_w_modes_remove_others( # verify we have RGB/w light entity_id = f"light.test_{active_mode}_0" - assert hass.states.get(entity_id).state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON + + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{active_mode}:0" # verify light & RGB/W entities removed @@ -823,8 +814,7 @@ async def test_rpc_cct_light( await init_integration(hass, 2) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cct:0" # Turn off @@ -836,7 +826,8 @@ async def test_rpc_cct_light( ) mock_rpc_device.call_rpc.assert_called_once_with("CCT.Set", {"id": 0, "on": False}) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on @@ -851,7 +842,8 @@ async def test_rpc_cct_light( mock_rpc_device.mock_update() mock_rpc_device.call_rpc.assert_called_once_with("CCT.Set", {"id": 0, "on": True}) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP assert state.attributes[ATTR_BRIGHTNESS] == 196 # 77% of 255 @@ -874,7 +866,8 @@ async def test_rpc_cct_light( mock_rpc_device.call_rpc.assert_called_once_with( "CCT.Set", {"id": 0, "on": True, "brightness": 88} ) - 
state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 224 # 88% of 255 @@ -894,7 +887,8 @@ async def test_rpc_cct_light( mock_rpc_device.call_rpc.assert_called_once_with( "CCT.Set", {"id": 0, "on": True, "ct": 4444} ) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 4444 diff --git a/tests/components/shelly/test_number.py b/tests/components/shelly/test_number.py index c032a137bfc..41002917d86 100644 --- a/tests/components/shelly/test_number.py +++ b/tests/components/shelly/test_number.py @@ -54,15 +54,16 @@ async def test_block_number_update( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "50" + assert (state := hass.states.get(entity_id)) + assert state.state == "50" monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valvePos", 30) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == "30" + assert (state := hass.states.get(entity_id)) + assert state.state == "30" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-device_0-valvePos" @@ -103,14 +104,16 @@ async def test_block_restored_number( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "40" + assert (state := hass.states.get(entity_id)) + assert state.state == "40" # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "50" + assert (state := hass.states.get(entity_id)) + assert state.state == "50" async def test_block_restored_number_no_last_state( @@ -141,14 +144,16 @@ async def test_block_restored_number_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "50" + assert (state := hass.states.get(entity_id)) + assert state.state == "50" async def test_block_number_set_value( @@ -200,7 +205,10 @@ async def test_block_set_value_connection_error( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match="Device communication error occurred while calling action for number.test_name_valve_position of Test name", + ): await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, @@ -302,8 +310,7 @@ async def test_rpc_device_virtual_number( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "12.3" assert state.attributes.get(ATTR_MIN) == 0 assert state.attributes.get(ATTR_MAX) == 100 @@ -311,13 +318,13 @@ async def test_rpc_device_virtual_number( assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == 
expected_unit assert state.attributes.get(ATTR_MODE) is mode - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-number:203-number" monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 78.9) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "78.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "78.9" monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 56.7) await hass.services.async_call( @@ -327,7 +334,8 @@ async def test_rpc_device_virtual_number( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "56.7" + assert (state := hass.states.get(entity_id)) + assert state.state == "56.7" async def test_rpc_remove_virtual_number_when_mode_label( @@ -365,8 +373,7 @@ async def test_rpc_remove_virtual_number_when_mode_label( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_number_when_orphaned( @@ -390,8 +397,7 @@ async def test_rpc_remove_virtual_number_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_blu_trv_number_entity( @@ -427,7 +433,8 @@ async def test_blu_trv_ext_temp_set_value( # After HA start the state should be unknown because there was no previous external # temperature report - assert hass.states.get(entity_id).state is STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN await hass.services.async_call( NUMBER_DOMAIN, @@ -449,7 +456,8 @@ async def test_blu_trv_ext_temp_set_value( BLU_TRV_TIMEOUT, ) - assert hass.states.get(entity_id).state == "22.2" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.2" async def test_blu_trv_valve_pos_set_value( @@ -465,7 +473,8 @@ async def test_blu_trv_valve_pos_set_value( entity_id = f"{NUMBER_DOMAIN}.trv_name_valve_position" - assert hass.states.get(entity_id).state == "0" + assert (state := hass.states.get(entity_id)) + assert state.state == "0" monkeypatch.setitem(mock_blu_trv.status["blutrv:200"], "pos", 20) await hass.services.async_call( @@ -490,4 +499,5 @@ async def test_blu_trv_valve_pos_set_value( # device only accepts int for 'pos' value assert isinstance(mock_blu_trv.call_rpc.call_args[0][1]["params"]["pos"], int) - assert hass.states.get(entity_id).state == "20" + assert (state := hass.states.get(entity_id)) + assert state.state == "20" diff --git a/tests/components/shelly/test_select.py b/tests/components/shelly/test_select.py index 0a6eb2a5843..39e426baa58 100644 --- a/tests/components/shelly/test_select.py +++ b/tests/components/shelly/test_select.py @@ -56,8 +56,7 @@ async def test_rpc_device_virtual_enum( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == expected_state assert state.attributes.get(ATTR_OPTIONS) == [ "Title 1", @@ -65,13 +64,14 @@ async def test_rpc_device_virtual_enum( "option 3", ] - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == 
"123456789ABC-enum:203-enum" monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "option 2") mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "option 2" + + assert (state := hass.states.get(entity_id)) + assert state.state == "option 2" monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "option 1") await hass.services.async_call( @@ -83,7 +83,9 @@ async def test_rpc_device_virtual_enum( # 'Title 1' corresponds to 'option 1' assert mock_rpc_device.call_rpc.call_args[0][1] == {"id": 203, "value": "option 1"} mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "Title 1" + + assert (state := hass.states.get(entity_id)) + assert state.state == "Title 1" async def test_rpc_remove_virtual_enum_when_mode_label( @@ -122,8 +124,7 @@ async def test_rpc_remove_virtual_enum_when_mode_label( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_enum_when_orphaned( @@ -147,5 +148,4 @@ async def test_rpc_remove_virtual_enum_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None diff --git a/tests/components/shelly/test_sensor.py b/tests/components/shelly/test_sensor.py index 5c1f03de3e8..7edd38a4b31 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -40,7 +40,6 @@ from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.setup import async_setup_component from . 
import ( - get_entity_state, init_integration, mock_polling_rpc_update, mock_rest_update, @@ -66,15 +65,16 @@ async def test_block_sensor( entity_id = f"{SENSOR_DOMAIN}.test_name_channel_1_power" await init_integration(hass, 1) - assert hass.states.get(entity_id).state == "53.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "53.4" monkeypatch.setattr(mock_block_device.blocks[RELAY_BLOCK_ID], "power", 60.1) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == "60.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "60.1" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_0-power" @@ -85,14 +85,13 @@ async def test_energy_sensor( entity_id = f"{SENSOR_DOMAIN}.test_name_channel_1_energy" await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) # 1234567.89 Wmin / 60 / 1000 = 20.5761315 kWh assert state.state == "20.5761315" # suggested unit is KWh assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_0-energy" @@ -111,13 +110,12 @@ async def test_power_factory_unit_migration( entity_id = f"{SENSOR_DOMAIN}.test_name_power_factor" await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) # Value of 0.98 is converted to 98.0% assert state.state == "98.0" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-emeter_0-powerFactor" @@ -128,12 +126,11 @@ async def test_power_factory_without_unit_migration( entity_id = f"{SENSOR_DOMAIN}.test_name_power_factor" await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "0.98" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-emeter_0-powerFactor" @@ -147,12 +144,14 @@ async def test_block_rest_sensor( entity_id = register_entity(hass, SENSOR_DOMAIN, "test_name_rssi", "rssi") await init_integration(hass, 1) - assert hass.states.get(entity_id).state == "-64" + assert (state := hass.states.get(entity_id)) + assert state.state == "-64" monkeypatch.setitem(mock_block_device.status["wifi_sta"], "rssi", -71) await mock_rest_update(hass, freezer) - assert hass.states.get(entity_id).state == "-71" + assert (state := hass.states.get(entity_id)) + assert state.state == "-71" async def test_block_sleeping_sensor( @@ -175,15 +174,16 @@ async def test_block_sleeping_sensor( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.1" monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "temp", 23.4) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == "23.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "23.4" - entry = 
entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sensor_0-temp" @@ -211,8 +211,7 @@ async def test_block_restored_sleeping_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "20.4" assert state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT assert state.attributes[ATTR_DEVICE_CLASS] == SensorDeviceClass.TEMPERATURE @@ -222,7 +221,8 @@ async def test_block_restored_sleeping_sensor( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.1" async def test_block_restored_sleeping_sensor_no_last_state( @@ -246,14 +246,16 @@ async def test_block_restored_sleeping_sensor_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.1" async def test_block_sensor_error( @@ -266,15 +268,16 @@ async def test_block_sensor_error( entity_id = f"{SENSOR_DOMAIN}.test_name_battery" await init_integration(hass, 1) - assert hass.states.get(entity_id).state == "98" + assert (state := hass.states.get(entity_id)) + assert state.state == "98" monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "battery", -1) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-device_0-battery" @@ -321,7 +324,8 @@ async def test_block_not_matched_restored_sleeping_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "20.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "20.4" # Make device online monkeypatch.setattr( @@ -331,7 +335,8 @@ async def test_block_not_matched_restored_sleeping_sensor( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "20.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "20.4" async def test_block_sensor_without_value( @@ -403,7 +408,8 @@ async def test_block_sensor_values( monkeypatch.setattr(mock_block_device.blocks[block_id], attribute, value) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == final_value + assert (state := hass.states.get(entity_id)) + assert state.state == final_value @pytest.mark.parametrize( @@ -430,7 +436,8 @@ async def test_block_shelly_air_lamp_life( ) await init_integration(hass, 1) - assert hass.states.get(entity_id).state == percentage + assert (state := hass.states.get(entity_id)) + assert state.state == percentage 
async def test_rpc_sensor( @@ -440,17 +447,20 @@ async def test_rpc_sensor( entity_id = f"{SENSOR_DOMAIN}.test_cover_0_power" await init_integration(hass, 2) - assert hass.states.get(entity_id).state == "85.3" + assert (state := hass.states.get(entity_id)) + assert state.state == "85.3" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "apower", "88.2") mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "88.2" + assert (state := hass.states.get(entity_id)) + assert state.state == "88.2" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "apower", None) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -464,7 +474,8 @@ async def test_rpc_rssi_sensor_removal( entry = await init_integration(hass, 2) # WiFi1 enabled, do not remove sensor - assert get_entity_state(hass, entity_id) == "-63" + assert (state := hass.states.get(entity_id)) + assert state.state == "-63" # WiFi1 & WiFi2 disabled - remove sensor monkeypatch.setitem(mock_rpc_device.config["wifi"]["sta"], "enable", False) @@ -476,7 +487,9 @@ async def test_rpc_rssi_sensor_removal( monkeypatch.setitem(mock_rpc_device.config["wifi"]["sta1"], "enable", True) await hass.config_entries.async_reload(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == "-63" + + assert (state := hass.states.get(entity_id)) + assert state.state == "-63" async def test_rpc_illuminance_sensor( @@ -486,10 +499,10 @@ async def test_rpc_illuminance_sensor( entity_id = f"{SENSOR_DOMAIN}.test_name_illuminance" await init_integration(hass, 2) - assert hass.states.get(entity_id).state == "345" + assert (state := hass.states.get(entity_id)) + assert state.state == "345" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-illuminance:0-illuminance" @@ -503,17 +516,18 @@ async def test_rpc_sensor_error( entity_id = f"{SENSOR_DOMAIN}.test_name_voltmeter" await init_integration(hass, 2) - assert hass.states.get(entity_id).state == "4.321" + assert (state := hass.states.get(entity_id)) + assert state.state == "4.321" mutate_rpc_device_status( monkeypatch, mock_rpc_device, "voltmeter:100", "voltage", None ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-voltmeter:100-voltmeter" @@ -528,15 +542,16 @@ async def test_rpc_polling_sensor( entity_id = register_entity(hass, SENSOR_DOMAIN, "test_name_rssi", "wifi-rssi") await init_integration(hass, 2) - assert hass.states.get(entity_id).state == "-63" + assert (state := hass.states.get(entity_id)) + assert state.state == "-63" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "wifi", "rssi", "-70") await mock_polling_rpc_update(hass, freezer) - assert hass.states.get(entity_id).state == "-70" + assert (state := hass.states.get(entity_id)) + assert state.state == "-70" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-wifi-rssi" @@ -567,12 +582,14 
@@ async def test_rpc_sleeping_sensor( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.9" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "temperature:0", "tC", 23.4) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "23.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "23.4" async def test_rpc_restored_sleeping_sensor( @@ -600,7 +617,8 @@ async def test_rpc_restored_sleeping_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "21.0" + assert (state := hass.states.get(entity_id)) + assert state.state == "21.0" # Make device online monkeypatch.setattr(mock_rpc_device, "initialized", True) @@ -611,7 +629,8 @@ async def test_rpc_restored_sleeping_sensor( mock_rpc_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "22.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.9" async def test_rpc_restored_sleeping_sensor_no_last_state( @@ -637,7 +656,8 @@ async def test_rpc_restored_sleeping_sensor_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_rpc_device, "initialized", True) @@ -648,7 +668,8 @@ async def test_rpc_restored_sleeping_sensor_no_last_state( mock_rpc_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "22.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.9" @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -658,36 +679,32 @@ async def test_rpc_em1_sensors( """Test RPC sensors for EM1 component.""" await init_integration(hass, 2) - state = hass.states.get("sensor.test_name_em0_power") - assert state + assert (state := hass.states.get("sensor.test_name_em0_power")) assert state.state == "85.3" - entry = entity_registry.async_get("sensor.test_name_em0_power") - assert entry + assert (entry := entity_registry.async_get("sensor.test_name_em0_power")) assert entry.unique_id == "123456789ABC-em1:0-power_em1" - state = hass.states.get("sensor.test_name_em1_power") - assert state + assert (state := hass.states.get("sensor.test_name_em1_power")) assert state.state == "123.3" - entry = entity_registry.async_get("sensor.test_name_em1_power") - assert entry + assert (entry := entity_registry.async_get("sensor.test_name_em1_power")) assert entry.unique_id == "123456789ABC-em1:1-power_em1" - state = hass.states.get("sensor.test_name_em0_total_active_energy") - assert state + assert (state := hass.states.get("sensor.test_name_em0_total_active_energy")) assert state.state == "123.4564" - entry = entity_registry.async_get("sensor.test_name_em0_total_active_energy") - assert entry + assert ( + entry := entity_registry.async_get("sensor.test_name_em0_total_active_energy") + ) assert entry.unique_id == "123456789ABC-em1data:0-total_act_energy" - state = hass.states.get("sensor.test_name_em1_total_active_energy") - assert state + assert (state := hass.states.get("sensor.test_name_em1_total_active_energy")) assert state.state == "987.6543" - entry = 
entity_registry.async_get("sensor.test_name_em1_total_active_energy") - assert entry + assert ( + entry := entity_registry.async_get("sensor.test_name_em1_total_active_energy") + ) assert entry.unique_id == "123456789ABC-em1data:1-total_act_energy" @@ -713,7 +730,7 @@ async def test_rpc_sleeping_update_entity_service( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "22.9" await hass.services.async_call( @@ -724,11 +741,10 @@ async def test_rpc_sleeping_update_entity_service( ) # Entity should be available after update_entity service call - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "22.9" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-temperature:0-temperature_0" assert ( @@ -762,7 +778,8 @@ async def test_block_sleeping_update_entity_service( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.1" await hass.services.async_call( HA_DOMAIN, @@ -772,11 +789,10 @@ async def test_block_sleeping_update_entity_service( ) # Entity should be available after update_entity service call - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "22.1" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sensor_0-temp" assert ( @@ -809,20 +825,18 @@ async def test_rpc_analog_input_sensors( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog" - assert hass.states.get(entity_id).state == "89" + assert (state := hass.states.get(entity_id)) + assert state.state == "89" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:1-analoginput" entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog_value" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "8.9" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:1-analoginput_xpercent" @@ -857,7 +871,8 @@ async def test_rpc_disabled_xpercent( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog" - assert hass.states.get(entity_id).state == "89" + assert (state := hass.states.get(entity_id)) + assert state.state == "89" entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog_value" assert hass.states.get(entity_id) is None @@ -887,23 +902,20 @@ async def test_rpc_pulse_counter_sensors( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter" - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "56174" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "pulse" assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.TOTAL - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := 
entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:2-pulse_counter" entity_id = f"{SENSOR_DOMAIN}.gas_counter_value" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "561.74" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:2-counter_value" @@ -938,7 +950,8 @@ async def test_rpc_disabled_xtotal_counter( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter" - assert hass.states.get(entity_id).state == "20635" + assert (state := hass.states.get(entity_id)) + assert state.state == "20635" entity_id = f"{SENSOR_DOMAIN}.gas_counter_value" assert hass.states.get(entity_id) is None @@ -968,23 +981,20 @@ async def test_rpc_pulse_counter_frequency_sensors( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency" - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "208.0" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfFrequency.HERTZ assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:2-counter_frequency" entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency_value" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "6.11" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:2-counter_frequency_value" @@ -1007,11 +1017,9 @@ async def test_rpc_disabled_xfreq( entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency_value" - state = hass.states.get(entity_id) - assert not state + assert hass.states.get(entity_id) is None - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None @pytest.mark.parametrize( @@ -1043,17 +1051,16 @@ async def test_rpc_device_virtual_text_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "lorem ipsum" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-text:203-text" monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "dolor sit amet") mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "dolor sit amet" + assert (state := hass.states.get(entity_id)) + assert state.state == "dolor sit amet" async def test_rpc_remove_text_virtual_sensor_when_mode_field( @@ -1086,8 +1093,7 @@ async def test_rpc_remove_text_virtual_sensor_when_mode_field( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_text_virtual_sensor_when_orphaned( @@ -1111,8 +1117,7 @@ async def test_rpc_remove_text_virtual_sensor_when_orphaned( await 
hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None @pytest.mark.parametrize( @@ -1148,18 +1153,17 @@ async def test_rpc_device_virtual_number_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "34.5" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-number:203-number" monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 56.7) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "56.7" + assert (state := hass.states.get(entity_id)) + assert state.state == "56.7" async def test_rpc_remove_number_virtual_sensor_when_mode_field( @@ -1197,8 +1201,7 @@ async def test_rpc_remove_number_virtual_sensor_when_mode_field( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_number_virtual_sensor_when_orphaned( @@ -1222,8 +1225,7 @@ async def test_rpc_remove_number_virtual_sensor_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None @pytest.mark.parametrize( @@ -1263,19 +1265,18 @@ async def test_rpc_device_virtual_enum_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == expected_state assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENUM assert state.attributes.get(ATTR_OPTIONS) == ["Title 1", "two", "three"] - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-enum:203-enum" monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "two") mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "two" + assert (state := hass.states.get(entity_id)) + assert state.state == "two" async def test_rpc_remove_enum_virtual_sensor_when_mode_dropdown( @@ -1317,8 +1318,7 @@ async def test_rpc_remove_enum_virtual_sensor_when_mode_dropdown( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_enum_virtual_sensor_when_orphaned( @@ -1342,8 +1342,7 @@ async def test_rpc_remove_enum_virtual_sensor_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -1374,61 +1373,51 @@ async def test_rpc_rgbw_sensors( entity_id = f"sensor.test_name_{light_type}_light_0_power" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "12.2" assert 
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPower.WATT - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-power_{light_type}" entity_id = f"sensor.test_name_{light_type}_light_0_energy" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "0.045141" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-energy_{light_type}" entity_id = f"sensor.test_name_{light_type}_light_0_current" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "0.23" assert ( state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfElectricCurrent.AMPERE ) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-current_{light_type}" entity_id = f"sensor.test_name_{light_type}_light_0_voltage" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "12.4" assert ( state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfElectricPotential.VOLT ) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-voltage_{light_type}" entity_id = f"sensor.test_name_{light_type}_light_0_device_temperature" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "54.3" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-temperature_{light_type}" @@ -1441,15 +1430,17 @@ async def test_rpc_device_sensor_goes_unavailable_on_disconnect( ) -> None: """Test RPC device with sensor goes unavailable on disconnect.""" await init_integration(hass, 2) - temp_sensor_state = hass.states.get("sensor.test_name_temperature") - assert temp_sensor_state is not None - assert temp_sensor_state.state != STATE_UNAVAILABLE + + assert (state := hass.states.get("sensor.test_name_temperature")) + assert state.state != STATE_UNAVAILABLE + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setattr(mock_rpc_device, "initialized", False) mock_rpc_device.mock_disconnected() await hass.async_block_till_done() - temp_sensor_state = hass.states.get("sensor.test_name_temperature") - assert temp_sensor_state.state == STATE_UNAVAILABLE + + assert (state := hass.states.get("sensor.test_name_temperature")) + assert state.state == STATE_UNAVAILABLE freezer.tick(60) async_fire_time_changed(hass) @@ -1460,8 +1451,9 @@ async def test_rpc_device_sensor_goes_unavailable_on_disconnect( monkeypatch.setattr(mock_rpc_device, "initialized", True) mock_rpc_device.mock_initialized() await hass.async_block_till_done() - temp_sensor_state = hass.states.get("sensor.test_name_temperature") - assert temp_sensor_state.state != STATE_UNAVAILABLE + + assert (state := hass.states.get("sensor.test_name_temperature")) + assert state.state != 
STATE_UNAVAILABLE async def test_rpc_voltmeter_value( @@ -1474,13 +1466,11 @@ async def test_rpc_voltmeter_value( await init_integration(hass, 2) - state = hass.states.get(entity_id) - + assert (state := hass.states.get(entity_id)) assert state.state == "12.34" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "ppm" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-voltmeter:100-voltmeter_value" @@ -1525,8 +1515,7 @@ async def test_rpc_device_virtual_number_sensor_with_device_class( await init_integration(hass, 3) - state = hass.states.get("sensor.test_name_current_humidity") - assert state + assert (state := hass.states.get("sensor.test_name_current_humidity")) assert state.state == "34" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.HUMIDITY diff --git a/tests/components/shelly/test_switch.py b/tests/components/shelly/test_switch.py index 0425f883ad6..824742d1798 100644 --- a/tests/components/shelly/test_switch.py +++ b/tests/components/shelly/test_switch.py @@ -28,7 +28,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry -from . import get_entity_state, init_integration, register_device, register_entity +from . import init_integration, register_device, register_entity from tests.common import mock_restore_cache @@ -42,22 +42,25 @@ async def test_block_device_services( ) -> None: """Test block device turn on/off services.""" await init_integration(hass, 1) + entity_id = "switch.test_name_channel_1" await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "switch.test_name_channel_1"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get("switch.test_name_channel_1").state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_name_channel_1"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get("switch.test_name_channel_1").state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF @pytest.mark.parametrize("model", MOTION_MODELS) @@ -75,7 +78,8 @@ async def test_block_motion_switch( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # turn off await hass.services.async_call( @@ -88,7 +92,9 @@ async def test_block_motion_switch( mock_block_device.mock_update() mock_block_device.set_shelly_motion_detection.assert_called_once_with(False) - assert get_entity_state(hass, entity_id) == STATE_OFF + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF # turn on mock_block_device.set_shelly_motion_detection.reset_mock() @@ -102,7 +108,9 @@ async def test_block_motion_switch( mock_block_device.mock_update() mock_block_device.set_shelly_motion_detection.assert_called_once_with(True) - assert get_entity_state(hass, entity_id) == STATE_ON + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON @pytest.mark.parametrize("model", MOTION_MODELS) @@ -132,14 +140,16 @@ async def 
test_block_restored_motion_switch( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON @pytest.mark.parametrize("model", MOTION_MODELS) @@ -167,14 +177,16 @@ async def test_block_restored_motion_switch_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON @pytest.mark.parametrize( @@ -205,8 +217,7 @@ async def test_block_device_unique_ids( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - entry = entity_registry.async_get(entity) - assert entry + assert (entry := entity_registry.async_get(entity)) assert entry.unique_id == unique_id @@ -221,7 +232,10 @@ async def test_block_set_state_connection_error( ) await init_integration(hass, 1) - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match="Device communication error occurred while calling action for switch.test_name_channel_1 of Test name", + ): await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, @@ -270,11 +284,15 @@ async def test_block_device_update( """Test block device update.""" monkeypatch.setattr(mock_block_device.blocks[RELAY_BLOCK_ID], "output", False) await init_integration(hass, 1) - assert hass.states.get("switch.test_name_channel_1").state == STATE_OFF + + entity_id = "switch.test_name_channel_1" + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setattr(mock_block_device.blocks[RELAY_BLOCK_ID], "output", True) mock_block_device.mock_update() - assert hass.states.get("switch.test_name_channel_1").state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON async def test_block_device_no_relay_blocks( @@ -314,23 +332,26 @@ async def test_rpc_device_services( monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) await init_integration(hass, 2) + entity_id = "switch.test_switch_0" await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "switch.test_switch_0"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get("switch.test_switch_0").state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON monkeypatch.setitem(mock_rpc_device.status["switch:0"], "output", False) await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_switch_0"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get("switch.test_switch_0").state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert 
state.state == STATE_OFF async def test_rpc_device_unique_ids( @@ -344,8 +365,7 @@ async def test_rpc_device_unique_ids( monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) await init_integration(hass, 2) - entry = entity_registry.async_get("switch.test_switch_0") - assert entry + assert (entry := entity_registry.async_get("switch.test_switch_0")) assert entry.unique_id == "123456789ABC-switch:0" @@ -357,13 +377,27 @@ async def test_rpc_device_switch_type_lights_mode( mock_rpc_device.config["sys"]["ui_data"], "consumption_types", ["lights"] ) await init_integration(hass, 2) + assert hass.states.get("switch.test_switch_0") is None -@pytest.mark.parametrize("exc", [DeviceConnectionError, RpcCallError(-1, "error")]) +@pytest.mark.parametrize( + ("exc", "error"), + [ + ( + DeviceConnectionError, + "Device communication error occurred while calling action for switch.test_switch_0 of Test name", + ), + ( + RpcCallError(-1, "error"), + "RPC call error occurred while calling action for switch.test_switch_0 of Test name", + ), + ], +) async def test_rpc_set_state_errors( hass: HomeAssistant, exc: Exception, + error: str, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -373,7 +407,7 @@ async def test_rpc_set_state_errors( monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) await init_integration(hass, 2) - with pytest.raises(HomeAssistantError): + with pytest.raises(HomeAssistantError, match=error): await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, @@ -447,7 +481,7 @@ async def test_wall_display_relay_mode( config_entry = await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - assert hass.states.get(climate_entity_id) is not None + assert (state := hass.states.get(climate_entity_id)) assert len(hass.states.async_entity_ids(CLIMATE_DOMAIN)) == 1 new_status = deepcopy(mock_rpc_device.status) @@ -460,17 +494,16 @@ async def test_wall_display_relay_mode( await hass.async_block_till_done() # the climate entity should be removed + assert hass.states.get(climate_entity_id) is None assert len(hass.states.async_entity_ids(CLIMATE_DOMAIN)) == 0 # the switch entity should be created - state = hass.states.get(switch_entity_id) - assert state + assert (state := hass.states.get(switch_entity_id)) assert state.state == STATE_ON assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 - entry = entity_registry.async_get(switch_entity_id) - assert entry + assert (entry := entity_registry.async_get(switch_entity_id)) assert entry.unique_id == "123456789ABC-switch:0" @@ -503,12 +536,10 @@ async def test_rpc_device_virtual_switch( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-boolean:200-boolean" monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", False) @@ -519,7 +550,8 @@ async def test_rpc_device_virtual_switch( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", True) await hass.services.async_call( @@ -529,7 +561,8 @@ async def test_rpc_device_virtual_switch( blocking=True, ) mock_rpc_device.mock_update() - assert 
hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON async def test_rpc_device_virtual_binary_sensor( @@ -550,8 +583,7 @@ async def test_rpc_device_virtual_binary_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert not state + assert hass.states.get(entity_id) is None async def test_rpc_remove_virtual_switch_when_mode_label( @@ -584,8 +616,7 @@ async def test_rpc_remove_virtual_switch_when_mode_label( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_switch_when_orphaned( @@ -609,8 +640,7 @@ async def test_rpc_remove_virtual_switch_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -640,11 +670,10 @@ async def test_rpc_device_script_switch( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{key}-script" monkeypatch.setitem(mock_rpc_device.status[key], "running", False) @@ -655,8 +684,8 @@ async def test_rpc_device_script_switch( blocking=True, ) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) - assert state + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF monkeypatch.setitem(mock_rpc_device.status[key], "running", True) @@ -667,6 +696,6 @@ async def test_rpc_device_script_switch( blocking=True, ) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) - assert state + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON diff --git a/tests/components/shelly/test_text.py b/tests/components/shelly/test_text.py index 19acb856f35..a4812cc4160 100644 --- a/tests/components/shelly/test_text.py +++ b/tests/components/shelly/test_text.py @@ -47,17 +47,17 @@ async def test_rpc_device_virtual_text( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "lorem ipsum" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-text:203-text" monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "dolor sit amet") mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "dolor sit amet" + + assert (state := hass.states.get(entity_id)) + assert state.state == "dolor sit amet" monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "sed do eiusmod") await hass.services.async_call( @@ -67,7 +67,9 @@ async def test_rpc_device_virtual_text( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "sed do eiusmod" + + assert (state := hass.states.get(entity_id)) + assert state.state == "sed do eiusmod" async def test_rpc_remove_virtual_text_when_mode_label( @@ -100,8 +102,7 @@ async def test_rpc_remove_virtual_text_when_mode_label( await 
hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_text_when_orphaned( @@ -125,5 +126,4 @@ async def test_rpc_remove_virtual_text_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None diff --git a/tests/components/shelly/test_update.py b/tests/components/shelly/test_update.py index 9ea66c1acb7..51016f0cdaa 100644 --- a/tests/components/shelly/test_update.py +++ b/tests/components/shelly/test_update.py @@ -61,14 +61,16 @@ async def test_block_update( monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - supported_feat = state.attributes[ATTR_SUPPORTED_FEATURES] - assert supported_feat == UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS + ) await hass.services.async_call( UPDATE_DOMAIN, @@ -78,7 +80,7 @@ async def test_block_update( ) assert mock_block_device.trigger_ota_update.call_count == 1 - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" @@ -89,15 +91,14 @@ async def test_block_update( monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2.0.0") await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-fwupdate" @@ -117,7 +118,7 @@ async def test_block_beta_update( monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "1.0.0" @@ -129,7 +130,7 @@ async def test_block_beta_update( ) await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" @@ -145,7 +146,7 @@ async def test_block_beta_update( ) assert mock_block_device.trigger_ota_update.call_count == 1 - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON 
assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" @@ -155,15 +156,14 @@ async def test_block_beta_update( monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2.0.0-beta") await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2.0.0-beta" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-fwupdate_beta" @@ -184,14 +184,16 @@ async def test_block_update_connection_error( ) await init_integration(hass, 1) - with pytest.raises(HomeAssistantError) as excinfo: + with pytest.raises( + HomeAssistantError, + match="Device communication error occurred while triggering OTA update for Test name", + ): await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, {ATTR_ENTITY_ID: "update.test_name_firmware"}, blocking=True, ) - assert "Error starting OTA update" in str(excinfo.value) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -254,11 +256,12 @@ async def test_block_version_compare( monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) - state = hass.states.get(entity_id_latest) + assert (state := hass.states.get(entity_id_latest)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == STABLE assert state.attributes[ATTR_LATEST_VERSION] == STABLE - state = hass.states.get(entity_id_beta) + + assert (state := hass.states.get(entity_id_beta)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == STABLE assert state.attributes[ATTR_LATEST_VERSION] == BETA @@ -268,11 +271,12 @@ async def test_block_version_compare( monkeypatch.setitem(mock_block_device.status["update"], "beta_version", BETA) await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id_latest) + assert (state := hass.states.get(entity_id_latest)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == BETA assert state.attributes[ATTR_LATEST_VERSION] == STABLE - state = hass.states.get(entity_id_beta) + + assert (state := hass.states.get(entity_id_beta)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == BETA assert state.attributes[ATTR_LATEST_VERSION] == BETA @@ -296,7 +300,7 @@ async def test_rpc_update( ) await init_integration(hass, 2) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -314,7 +318,7 @@ async def test_rpc_update( assert mock_rpc_device.trigger_ota_update.call_count == 1 - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -337,7 +341,7 @@ async def test_rpc_update( }, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_IN_PROGRESS] is True assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 0 @@ -357,7 +361,7 
@@ async def test_rpc_update( }, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_IN_PROGRESS] is True assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 50 @@ -378,15 +382,14 @@ async def test_rpc_update( monkeypatch.setitem(mock_rpc_device.shelly, "ver", "2") mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sys-fwupdate" @@ -417,7 +420,7 @@ async def test_rpc_sleeping_update( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -429,7 +432,7 @@ async def test_rpc_sleeping_update( monkeypatch.setitem(mock_rpc_device.shelly, "ver", "2") mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -437,8 +440,7 @@ async def test_rpc_sleeping_update( assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sys-fwupdate" @@ -469,7 +471,7 @@ async def test_rpc_restored_sleeping_update( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -486,7 +488,7 @@ async def test_rpc_restored_sleeping_update( mock_rpc_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -525,7 +527,7 @@ async def test_rpc_restored_sleeping_update_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_UNKNOWN # Make device online @@ -537,7 +539,7 @@ async def test_rpc_restored_sleeping_update_no_last_state( mock_rpc_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -567,7 +569,7 @@ async def test_rpc_beta_update( ) await init_integration(hass, 2) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == 
STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "1" @@ -584,7 +586,7 @@ async def test_rpc_beta_update( ) await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2b" @@ -614,7 +616,7 @@ async def test_rpc_beta_update( assert mock_rpc_device.trigger_ota_update.call_count == 1 - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2b" @@ -637,7 +639,7 @@ async def test_rpc_beta_update( }, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_IN_PROGRESS] is True assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 40 @@ -658,23 +660,28 @@ async def test_rpc_beta_update( monkeypatch.setitem(mock_rpc_device.shelly, "ver", "2b") await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2b" assert state.attributes[ATTR_LATEST_VERSION] == "2b" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sys-fwupdate_beta" @pytest.mark.parametrize( ("exc", "error"), [ - (DeviceConnectionError, "OTA update connection error: DeviceConnectionError()"), - (RpcCallError(-1, "error"), "OTA update request error"), + ( + DeviceConnectionError, + "Device communication error occurred while triggering OTA update for Test name", + ), + ( + RpcCallError(-1, "error"), + "RPC call error occurred while triggering OTA update for Test name", + ), ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -701,14 +708,13 @@ async def test_rpc_update_errors( ) await init_integration(hass, 2) - with pytest.raises(HomeAssistantError) as excinfo: + with pytest.raises(HomeAssistantError, match=error): await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, {ATTR_ENTITY_ID: "update.test_name_firmware"}, blocking=True, ) - assert error in str(excinfo.value) @pytest.mark.usefixtures("entity_registry_enabled_by_default") diff --git a/tests/components/shelly/test_valve.py b/tests/components/shelly/test_valve.py index 9dc8597120a..7bf9e3b5f1a 100644 --- a/tests/components/shelly/test_valve.py +++ b/tests/components/shelly/test_valve.py @@ -25,11 +25,11 @@ async def test_block_device_gas_valve( await init_integration(hass, 1, MODEL_GAS) entity_id = "valve.test_name_valve" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-valve_0-valve" - assert hass.states.get(entity_id).state == ValveState.CLOSED + assert (state := hass.states.get(entity_id)) + assert state.state == ValveState.CLOSED await hass.services.async_call( VALVE_DOMAIN, @@ -38,16 +38,14 @@ async def test_block_device_gas_valve( blocking=True, ) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == ValveState.OPENING 
monkeypatch.setattr(mock_block_device.blocks[GAS_VALVE_BLOCK_ID], "valve", "opened") mock_block_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == ValveState.OPEN await hass.services.async_call( @@ -57,14 +55,12 @@ async def test_block_device_gas_valve( blocking=True, ) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == ValveState.CLOSING monkeypatch.setattr(mock_block_device.blocks[GAS_VALVE_BLOCK_ID], "valve", "closed") mock_block_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == ValveState.CLOSED diff --git a/tests/components/smartthings/__init__.py b/tests/components/smartthings/__init__.py index ad09f1a7acf..fce344b57a7 100644 --- a/tests/components/smartthings/__init__.py +++ b/tests/components/smartthings/__init__.py @@ -3,7 +3,7 @@ from typing import Any from unittest.mock import AsyncMock -from pysmartthings.models import Attribute, Capability, DeviceEvent +from pysmartthings import Attribute, Capability, DeviceEvent from syrupy import SnapshotAssertion from homeassistant.components.smartthings.const import MAIN diff --git a/tests/components/smartthings/conftest.py b/tests/components/smartthings/conftest.py index 761b65adc8a..ef6b6f29011 100644 --- a/tests/components/smartthings/conftest.py +++ b/tests/components/smartthings/conftest.py @@ -4,7 +4,7 @@ from collections.abc import Generator import time from unittest.mock import AsyncMock, patch -from pysmartthings.models import ( +from pysmartthings import ( DeviceResponse, DeviceStatus, LocationResponse, @@ -106,6 +106,7 @@ def mock_smartthings() -> Generator[AsyncMock]: "centralite", "da_ref_normal_000001", "vd_network_audio_002s", + "vd_sensor_light_2023", "iphone", "da_sac_ehs_000001_sub", "da_wm_dw_000001", @@ -113,8 +114,10 @@ def mock_smartthings() -> Generator[AsyncMock]: "da_wm_wd_000001_1", "da_wm_wm_000001", "da_wm_wm_000001_1", + "da_wm_sc_000001", "da_rvc_normal_000001", "da_ks_microwave_0101x", + "da_ks_cooktop_31001", "da_ks_range_0101x", "da_ks_oven_01061", "hue_color_temperature_bulb", @@ -140,6 +143,7 @@ def mock_smartthings() -> Generator[AsyncMock]: "tplink_p110", "ikea_kadrilj", "aux_ac", + "hw_q80r_soundbar", ] ) def device_fixture( @@ -182,6 +186,7 @@ def mock_config_entry(expires_at: int) -> MockConfigEntry: CONF_INSTALLED_APP_ID: "123", }, version=3, + minor_version=2, ) diff --git a/tests/components/smartthings/fixtures/device_status/da_ks_cooktop_31001.json b/tests/components/smartthings/fixtures/device_status/da_ks_cooktop_31001.json new file mode 100644 index 00000000000..5ca8f56fbbf --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_ks_cooktop_31001.json @@ -0,0 +1,508 @@ +{ + "components": { + "burner-02": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.550Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-26T05:57:23.203Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "manualLevelMax": 
{ + "value": 15, + "timestamp": "2025-03-25T18:18:28.550Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "status": { + "value": "idle", + "timestamp": "2025-03-25T18:18:28.550Z" + } + } + }, + "burner-01": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.518Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-26T05:57:23.203Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.518Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "status": { + "value": "idle", + "timestamp": "2025-03-25T18:18:28.518Z" + } + } + }, + "main": { + "custom.disabledComponents": { + "disabledComponents": { + "value": ["burner-6"], + "timestamp": "2025-03-25T18:18:28.464Z" + } + }, + "custom.userNotification": { + "message": { + "value": null + } + }, + "samsungce.remoteManagementData": { + "reportRawData": { + "value": "AgUBASCgAwAACaEDAAAM4AQAAAAA4QHwAw==", + "timestamp": "2025-03-26T07:27:58.282Z" + }, + "version": { + "value": "CT-31.0001", + "timestamp": "2025-03-25T18:18:28.476Z" + } + }, + "samsungce.deviceIdentification": { + "micomAssayCode": { + "value": "5828", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "modelName": { + "value": "NZ64B5046GK", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "serialNumber": { + "value": "B8C878DX900290H", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "serialNumberExtra": { + "value": "N/A", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "modelClassificationCode": { + "value": "50000204001611000E00000000000000", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "description": { + "value": "N/A", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "releaseYear": { + "value": null + }, + "binaryId": { + "value": "TP2X_DA-KS-COOKTOP-31001", + "timestamp": "2025-03-25T18:18:28.476Z" + } + }, + "switch": { + "switch": { + "value": "off", + "timestamp": "2025-03-26T07:27:58.478Z" + } + }, + "samsungce.errorAndAlarmState": { + "events": { + "value": [], + "timestamp": "2025-03-25T18:18:28.476Z" + } + }, + "samsungce.cooktopFlexZone": { + "flexZones": { + "value": [], + "timestamp": "2025-03-26T05:57:23.671Z" + } + }, + "samsungce.softwareVersion": { + "versions": { + "value": [ + { + "id": "Wifi", + "swType": "Wifi-Application", + "versionNumber": "80001A220811", + "description": "Aug 11 2022 08:38:36, Wifi:ws029_030, STDK : 1.7.4)" + }, + { + "id": "Micom", + "swType": "Micom Software", + "versionNumber": "240617", + "description": "Description for this micom version" + } + ], + "timestamp": "2025-03-25T18:18:28.482Z" + } + }, + "healthCheck": { + "checkInterval": { + "value": null + }, + "healthStatus": { + "value": null + }, + "DeviceWatch-Enroll": { + "value": null + }, + "DeviceWatch-DeviceStatus": { + "value": null + } + }, + 
"custom.cooktopOperatingState": { + "supportedCooktopOperatingState": { + "value": ["ready", "run", "paused"], + "timestamp": "2025-03-26T07:26:39.690Z" + }, + "cooktopOperatingState": { + "value": "ready", + "timestamp": "2025-03-26T07:27:58.652Z" + } + }, + "samsungce.kitchenDeviceIdentification": { + "regionCode": { + "value": "EU", + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "modelCode": { + "value": "OZ8500B/EU2", + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "fuel": { + "value": null + }, + "type": { + "value": "cooktop", + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "representativeComponent": { + "value": null + } + }, + "samsungce.softwareUpdate": { + "targetModule": { + "value": null + }, + "otnDUID": { + "value": "JHCB2ZD4E2KRY", + "timestamp": "2025-03-25T18:18:28.482Z" + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "newVersionAvailable": { + "value": false, + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "operatingState": { + "value": null + }, + "progress": { + "value": null + } + }, + "samsungce.kidsLockControl": { + "lockState": { + "value": "unlocked", + "timestamp": "2025-03-25T18:18:28.476Z" + } + }, + "audioMute": { + "mute": { + "value": "unmuted", + "timestamp": "2025-03-25T18:18:28.464Z" + } + } + }, + "burner-06": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.591Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.591Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "status": { + "value": null + } + } + }, + "hood": { + "samsungce.connectionState": { + "connectionState": { + "value": "disconnected", + "timestamp": "2025-03-25T18:18:28.650Z" + } + }, + "samsungce.hoodFanSpeed": { + "settableMaxFanSpeed": { + "value": 5, + "timestamp": "2025-03-25T18:18:28.650Z" + }, + "hoodFanSpeed": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.650Z" + }, + "supportedHoodFanSpeed": { + "value": [1, 2, 3, 4, 5], + "timestamp": "2025-03-25T18:18:28.650Z" + }, + "settableMinFanSpeed": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.650Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.650Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.646Z" + }, + "status": { + "value": null + } + }, + "switch": { + "switch": { + "value": null + } + }, + "samsungce.lamp": { + "brightnessLevel": { + "value": null + }, + "supportedBrightnessLevel": { + "value": ["off", "mid"], + "timestamp": "2025-03-25T18:18:28.650Z" + } + } + }, + "burner-05": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.586Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": 
"2025-03-25T18:18:28.586Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.586Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "status": { + "value": null + } + } + }, + "burner-04": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.578Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-25T18:49:25.153Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.578Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.578Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.578Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.578Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "status": { + "value": "idle", + "timestamp": "2025-03-25T18:18:28.578Z" + } + } + }, + "burner-03": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.550Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-26T07:27:58.652Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.550Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "status": { + "value": "idle", + "timestamp": "2025-03-25T18:18:28.550Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/da_wm_sc_000001.json b/tests/components/smartthings/fixtures/device_status/da_wm_sc_000001.json new file mode 100644 index 00000000000..d52b5186db3 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_wm_sc_000001.json @@ -0,0 +1,929 @@ +{ + "components": { + "main": { + "samsungce.welcomeMessage": { + "welcomeMessage": { + "value": null + } + }, + "samsungce.deviceIdentification": { + "micomAssayCode": { + "value": "20299141", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "modelName": { + "value": null + }, + "serialNumber": { + "value": null + }, + "serialNumberExtra": { + "value": null + }, + "modelClassificationCode": { + "value": "3801010200151107020100FF00000000", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "description": { + "value": "DA_DF_TP2_20_COMMON_DF8500A/DC92-02995A_0010", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + 
"releaseYear": { + "value": null + }, + "binaryId": { + "value": "DA_DF_TP2_20_COMMON", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.steamClosetCycle": { + "supportedCycles": { + "value": [ + { + "cycle": "22", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6106", + "default": "off", + "options": ["off", "on"] + } + } + }, + { + "cycle": "23", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "32", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "09", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "12", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "0C", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "31", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "0B", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "10", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "0A", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6106", + "default": "off", + "options": ["off", "on"] + } + } + }, + { + "cycle": "14", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "13", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6106", + "default": "off", + "options": ["off", "on"] + } + } + }, + { + "cycle": "16", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "24", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6206", + "default": "on", + "options": ["off", "on"] + } + } + }, + { + "cycle": "25", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6206", + "default": "on", + "options": ["off", "on"] + } + } + }, + { + "cycle": "2F", + "supportedOptions": { + "keepFresh": { 
+ "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6206", + "default": "on", + "options": ["off", "on"] + } + } + }, + { + "cycle": "20", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6204", + "default": "on", + "options": ["on"] + } + } + }, + { + "cycle": "0F", + "supportedOptions": { + "keepFresh": { + "raw": "66F0", + "default": "off", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6204", + "default": "on", + "options": ["on"] + } + } + }, + { + "cycle": "27", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "30", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "15", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "1A", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "1B", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "1C", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "2D", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "07", + "supportedOptions": { + "keepFresh": { + "raw": "66F0", + "default": "off", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "08", + "supportedOptions": { + "keepFresh": { + "raw": "66F0", + "default": "off", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + } + ], + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "steamClosetCycle": { + "value": "Table_00_Course_22", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "referenceTable": { + "value": { + "id": "Table_00" + }, + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "switch": { + "switch": { + "value": "off", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.quickControl": { + "version": { + "value": null + } + }, + "ocf": { + "st": { + "value": null + }, + "mndt": { + "value": null + }, + "mnfv": { + "value": "DA_DF_TP2_20_COMMON_30230807", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnhw": { + "value": "MediaTek", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "di": { + "value": "b93211bf-9d96-bd21-3b2f-964fcc87f5cc", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnsl": { + "value": "http://www.samsung.com", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "dmv": { + "value": "res.1.1.0,sh.1.1.0", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "n": { + "value": 
"[airdresser] Samsung", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnmo": { + "value": "DA_DF_TP2_20_COMMON|20299141|3801010200151107020100FF00000000", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "vid": { + "value": "DA-WM-SC-000001", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnml": { + "value": "http://www.samsung.com", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnpv": { + "value": "DAWIT 2.0", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnos": { + "value": "TizenRT 2.0 + IPv6", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "pi": { + "value": "b93211bf-9d96-bd21-3b2f-964fcc87f5cc", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2025-01-14T01:42:53.834Z" + } + }, + "samsungce.steamClosetCyclePreset": { + "maxNumberOfPresets": { + "value": 10, + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "presets": { + "value": { + "F1": {}, + "F2": {}, + "F3": {}, + "F4": {}, + "F5": {}, + "F6": {}, + "F7": {}, + "F8": {}, + "F9": {}, + "FA": {} + }, + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": [ + "custom.steamClosetWrinklePrevent", + "custom.veryFineDustFilter", + "demandResponseLoadControl", + "sec.wifiConfiguration", + "samsungce.quickControl", + "samsungce.deviceInfoPrivate" + ], + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.driverVersion": { + "versionNumber": { + "value": 24110101, + "timestamp": "2024-12-02T07:55:47.237Z" + } + }, + "sec.diagnosticsInformation": { + "logType": { + "value": ["errCode", "dump"], + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "endpoint": { + "value": "SSM", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "minVersion": { + "value": "1.0", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "signinPermission": { + "value": null + }, + "setupId": { + "value": "A00", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "protocolType": { + "value": "wifi_https", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "tsId": { + "value": null + }, + "mnId": { + "value": "0AJT", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "dumpType": { + "value": "file", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.steamClosetKeepFreshMode": { + "operatingState": { + "value": "ready", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "status": { + "value": "off", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.kidsLock": { + "lockState": { + "value": "unlocked", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "demandResponseLoadControl": { + "drlcStatus": { + "value": null + } + }, + "powerConsumptionReport": { + "powerConsumption": { + "value": { + "energy": 207500, + "deltaEnergy": 0, + "power": 0, + "powerEnergy": 0.0, + "persistedEnergy": 0, + "energySaved": 0, + "start": "2025-02-10T22:51:59Z", + "end": "2025-02-11T08:21:17Z" + }, + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "dryerOperatingState": { + "completionTime": { + "value": "2025-02-11T09:00:17Z", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "machineState": { + "value": "stop", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "supportedMachineStates": { + "value": ["stop", "run", "pause"], + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "dryerJobState": { + "value": "none", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "refresh": {}, + "samsungce.steamClosetSanitizeMode": { + "status": { + "value": 
"off", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "custom.jobBeginningStatus": { + "jobBeginningStatus": { + "value": null + } + }, + "execute": { + "data": { + "value": { + "payload": { + "rt": ["x.com.samsung.da.information"], + "if": ["oic.if.baseline", "oic.if.a"], + "x.com.samsung.da.modelNum": "DA_DF_TP2_20_COMMON|20299141|3801010200151107020100FF00000000", + "x.com.samsung.da.description": "DA_DF_TP2_20_COMMON_DF8500A/DC92-02995A_0010", + "x.com.samsung.da.serialNum": "1EG158TW400002M", + "x.com.samsung.da.otnDUID": "MTCHUODP5V4FA", + "x.com.samsung.da.diagProtocolType": "WIFI_HTTPS", + "x.com.samsung.da.diagLogType": ["errCode", "dump"], + "x.com.samsung.da.diagDumpType": "file", + "x.com.samsung.da.diagEndPoint": "SSM", + "x.com.samsung.da.diagMnid": "0AJT", + "x.com.samsung.da.diagSetupid": "A00", + "x.com.samsung.da.diagMinVersion": "1.0", + "x.com.samsung.da.items": [ + { + "x.com.samsung.da.id": "0", + "x.com.samsung.da.description": "DA_DF_TP2_20_COMMON|20299141|3801010200151107020100FF00000000", + "x.com.samsung.da.type": "Software", + "x.com.samsung.da.number": "02673A230807(F821)", + "x.com.samsung.da.newVersionAvailable": "0" + }, + { + "x.com.samsung.da.id": "1", + "x.com.samsung.da.description": "Firmware_1_DB_20299141210618090FFFFF202995412203111604FFFF(015E2029914120299541_30000000)(FileDown:0)(Type:0)", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.number": "21061809,22031116", + "x.com.samsung.da.newVersionAvailable": "0" + }, + { + "x.com.samsung.da.id": "2", + "x.com.samsung.da.description": "Firmware_2_DB_2023564319111852041FFFFFFFFFFFFFFFFFFFFFFFFE(015E20235643FFFFFFFF_30000000)(FileDown:0)(Type:0)", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.number": "19111852,FFFFFFFF" + } + ] + } + }, + "data": { + "href": "/information/vs/0" + }, + "timestamp": "2024-03-06T11:24:05.312Z" + } + }, + "samsungce.steamClosetDelayEnd": { + "remainingTime": { + "value": 0, + "unit": "min", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.steamClosetAutoCycleLink": { + "steamClosetAutoCycleLink": { + "value": "on", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "sec.wifiConfiguration": { + "autoReconnection": { + "value": null + }, + "minVersion": { + "value": null + }, + "supportedWiFiFreq": { + "value": null + }, + "supportedAuthType": { + "value": null + }, + "protocolType": { + "value": null + } + }, + "custom.steamClosetWrinklePrevent": { + "steamClosetWrinklePrevent": { + "value": "off", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "remoteControlStatus": { + "remoteControlEnabled": { + "value": "false", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "custom.supportedOptions": { + "course": { + "value": null + }, + "referenceTable": { + "value": { + "id": "Table_00" + }, + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "supportedCourses": { + "value": [ + "22", + "23", + "32", + "09", + "12", + "0C", + "31", + "0B", + "10", + "0A", + "14", + "13", + "16", + "24", + "25", + "2F", + "20", + "0F", + "27", + "30", + "15", + "1A", + "1B", + "1C", + "2D", + "07", + "08" + ], + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "custom.steamClosetOperatingState": { + "supportedSteamClosetJobState": { + "value": ["none", "steaming", "airwashing", "drying", "finish"], + "timestamp": "2025-02-09T22:16:19.221Z" + }, + "completionTime": { + "value": "2025-02-11T09:00:17Z", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "steamClosetMachineState": { + "value": "stop", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + 
"supportedSteamClosetMachineState": { + "value": ["stop", "run", "pause"], + "timestamp": "2023-06-23T16:00:41.238Z" + }, + "steamClosetJobState": { + "value": "none", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "progress": { + "value": 1, + "unit": "%", + "timestamp": "2025-02-10T22:53:25.928Z" + }, + "remainingTimeStr": { + "value": "00:39", + "timestamp": "2025-02-10T22:53:25.928Z" + }, + "steamClosetDelayEndTime": { + "value": null + }, + "remainingTime": { + "value": 39, + "unit": "min", + "timestamp": "2025-02-10T22:53:25.928Z" + } + }, + "custom.energyType": { + "energyType": { + "value": "2.0", + "timestamp": "2024-03-06T11:24:06.106Z" + }, + "energySavingSupport": { + "value": false, + "timestamp": "2024-03-06T11:24:06.106Z" + }, + "drMaxDuration": { + "value": null + }, + "energySavingLevel": { + "value": null + }, + "energySavingInfo": { + "value": null + }, + "supportedEnergySavingLevels": { + "value": null + }, + "energySavingOperation": { + "value": null + }, + "notificationTemplateID": { + "value": null + }, + "energySavingOperationSupport": { + "value": null + } + }, + "samsungce.softwareUpdate": { + "targetModule": { + "value": {}, + "timestamp": "2025-02-09T17:33:28.019Z" + }, + "otnDUID": { + "value": "MTCHUODP5V4FA", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2023-06-23T16:00:41.636Z" + }, + "newVersionAvailable": { + "value": false, + "timestamp": "2025-02-09T17:33:28.019Z" + }, + "operatingState": { + "value": null + }, + "progress": { + "value": null + } + }, + "custom.veryFineDustFilter": { + "veryFineDustFilterStatus": { + "value": null + }, + "veryFineDustFilterResetType": { + "value": null + }, + "veryFineDustFilterUsage": { + "value": null + }, + "veryFineDustFilterLastResetDate": { + "value": null + }, + "veryFineDustFilterUsageStep": { + "value": null + }, + "veryFineDustFilterCapacity": { + "value": null + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/hw_q80r_soundbar.json b/tests/components/smartthings/fixtures/device_status/hw_q80r_soundbar.json new file mode 100644 index 00000000000..8cd0d3e35a9 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/hw_q80r_soundbar.json @@ -0,0 +1,173 @@ +{ + "components": { + "main": { + "mediaPlayback": { + "supportedPlaybackCommands": { + "value": ["play", "pause", "stop"], + "timestamp": "2025-03-23T01:10:02.207Z" + }, + "playbackStatus": { + "value": "playing", + "timestamp": "2025-03-23T01:19:44.622Z" + } + }, + "samsungvd.groupInfo": { + "role": { + "value": "none", + "timestamp": "2025-03-23T01:17:10.965Z" + }, + "channel": { + "value": "all", + "timestamp": "2025-03-23T01:17:10.965Z" + }, + "masterName": { + "value": "", + "timestamp": "2025-03-23T01:17:10.965Z" + }, + "status": { + "value": "single", + "timestamp": "2025-03-23T01:17:10.965Z" + } + }, + "audioVolume": { + "volume": { + "value": 1, + "unit": "%", + "timestamp": "2025-03-23T01:17:13.754Z" + } + }, + "ocf": { + "st": { + "value": "NONE", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mndt": { + "value": "2018-01-01", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnfv": { + "value": "HW-Q80RWWB-1012.6", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnhw": { + "value": "0-0", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "di": { + "value": "afcf3b91-48fe-4c3b-ab44-ddff2a0a6577", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnsl": { + "value": 
"http://www.samsung.com/sec/audio-video/", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "dmv": { + "value": "res.1.1.0,sh.1.1.0", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "n": { + "value": "[AV] Samsung Soundbar Q80R", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnmo": { + "value": "Q80R", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "vid": { + "value": "VD-NetworkAudio-001S", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnml": { + "value": "http://www.samsung.com", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnpv": { + "value": "Tizen 4.0", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnos": { + "value": "4.1.10", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "pi": { + "value": "afcf3b91-48fe-4c3b-ab44-ddff2a0a6577", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2024-12-18T21:07:25.406Z" + } + }, + "mediaInputSource": { + "supportedInputSources": { + "value": ["wifi", "bluetooth", "HDMI1", "HDMI2", "digital"], + "timestamp": "2025-03-23T01:18:01.663Z" + }, + "inputSource": { + "value": "wifi", + "timestamp": "2025-03-23T01:18:01.663Z" + } + }, + "refresh": {}, + "audioNotification": {}, + "audioMute": { + "mute": { + "value": "unmuted", + "timestamp": "2025-03-23T01:17:11.024Z" + } + }, + "execute": { + "data": { + "value": { + "payload": { + "rt": ["x.com.samsung.networkaudio.soundmode"], + "if": ["oic.if.a", "oic.if.baseline"], + "x.com.samsung.networkaudio.soundmode": "standard" + } + }, + "data": { + "href": "/sec/networkaudio/soundmode" + }, + "timestamp": "2023-07-16T23:16:55.582Z" + } + }, + "samsungvd.audioInputSource": { + "supportedInputSources": { + "value": ["wifi", "bluetooth", "HDMI1", "HDMI2", "digital"], + "timestamp": "2025-03-23T01:18:01.663Z" + }, + "inputSource": { + "value": "wificp", + "timestamp": "2025-03-23T01:18:01.663Z" + } + }, + "switch": { + "switch": { + "value": "on", + "timestamp": "2025-03-23T01:19:44.837Z" + } + }, + "audioTrackData": { + "totalTime": { + "value": null, + "timestamp": "2020-07-30T16:09:09.109Z" + }, + "audioTrackData": { + "value": { + "title": "Never Gonna Give You Up", + "artist": "Rick Astley" + }, + "timestamp": "2025-03-23T01:19:15.067Z" + }, + "elapsedTime": { + "value": null, + "timestamp": "2020-07-30T16:09:09.109Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/vd_sensor_light_2023.json b/tests/components/smartthings/fixtures/device_status/vd_sensor_light_2023.json new file mode 100644 index 00000000000..cffefa20c4a --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/vd_sensor_light_2023.json @@ -0,0 +1,95 @@ +{ + "components": { + "main": { + "ocf": { + "st": { + "value": "2025-01-14T08:07:36Z", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mndt": { + "value": "2023-01-01", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnfv": { + "value": "latest", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnhw": { + "value": "", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "di": { + "value": "5cc1c096-98b9-460c-8f1c-1045509ec605", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnsl": { + "value": "", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "dmv": { + "value": "res.1.1.0,sh.1.1.0", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "n": { + "value": "Light Sensor - 55 The Frame", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnmo": { + "value": "QE55LS03DAUXXN", + 
"timestamp": "2025-01-14T08:07:40.220Z" + }, + "vid": { + "value": "VD-Sensor.Light-2023", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnml": { + "value": "", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnpv": { + "value": "8.0", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnos": { + "value": "Tizen", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "pi": { + "value": "5cc1c096-98b9-460c-8f1c-1045509ec605", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2025-01-14T08:07:40.220Z" + } + }, + "samsungvd.deviceCategory": { + "category": { + "value": null + } + }, + "relativeBrightness": { + "brightnessIntensity": { + "value": 2, + "unit": "level", + "timestamp": "2025-02-11T19:08:25.539Z" + } + }, + "refresh": {}, + "execute": { + "data": { + "value": null + } + }, + "switch": { + "switch": { + "value": null + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/devices/da_ks_cooktop_31001.json b/tests/components/smartthings/fixtures/devices/da_ks_cooktop_31001.json new file mode 100644 index 00000000000..433e45dae7a --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/da_ks_cooktop_31001.json @@ -0,0 +1,277 @@ +{ + "items": [ + { + "deviceId": "808dbd84-f357-47e2-a0cd-3b66fa22d584", + "name": "Builtin Cooktop", + "label": "Induction Hob", + "manufacturerName": "0A4H", + "presentationId": "DA-KS-COOKTOP-31001", + "deviceManufacturerCode": "0A4H", + "locationId": "7d27161a-0ef6-4294-91a0-80054ea5bc59", + "ownerId": "d52fb883-0f76-f4d9-0f6a-7ec2c0987b11", + "roomId": "afe14ff1-d444-420d-a766-4dd52f3e1c71", + "deviceTypeId": "Cooktop", + "deviceTypeName": "Samsung Cooktop", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "healthCheck", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "audioMute", + "version": 1 + }, + { + "id": "custom.disabledComponents", + "version": 1 + }, + { + "id": "custom.userNotification", + "version": 1 + }, + { + "id": "custom.cooktopOperatingState", + "version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.kitchenDeviceIdentification", + "version": 1 + }, + { + "id": "samsungce.softwareVersion", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "samsungce.errorAndAlarmState", + "version": 1 + }, + { + "id": "samsungce.remoteManagementData", + "version": 1 + }, + { + "id": "samsungce.kidsLockControl", + "version": 1 + }, + { + "id": "samsungce.cooktopFlexZone", + "version": 1 + } + ], + "categories": [ + { + "name": "Cooktop", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-01", + "label": "burner-01", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-02", + "label": "burner-02", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-03", + "label": "burner-03", + "capabilities": [ 
+ { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-04", + "label": "burner-04", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-05", + "label": "burner-05", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-06", + "label": "burner-06", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "hood", + "label": "hood", + "capabilities": [ + { + "id": "switch", + "version": 1 + }, + { + "id": "samsungce.connectionState", + "version": 1 + }, + { + "id": "samsungce.hoodFanSpeed", + "version": 1 + }, + { + "id": "samsungce.lamp", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2025-03-25T18:18:23.576Z", + "profile": { + "id": "a99bbcb8-51c9-468d-b9d5-0ce6dca09d5a" + }, + "mqtt": { + "executingLocally": false, + "transferCandidate": false + }, + "type": "MQTT", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/da_wm_sc_000001.json b/tests/components/smartthings/fixtures/devices/da_wm_sc_000001.json new file mode 100644 index 00000000000..8b501cba9b7 --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/da_wm_sc_000001.json @@ -0,0 +1,172 @@ +{ + "items": [ + { + "deviceId": "b93211bf-9d96-bd21-3b2f-964fcc87f5cc", + "name": "[airdresser] Samsung", + "label": "AirDresser", + "manufacturerName": "Samsung Electronics", + "presentationId": "DA-WM-SC-000001", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "df59873c-4e2c-43ba-bcd4-ade4efb0504a", + "ownerId": "71254e90-c144-45b6-aabe-709f78f48376", + "roomId": "4c9052ba-4430-4cb1-a788-f1e4449c43c9", + "deviceTypeName": "Samsung OCF Steam Closet", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "remoteControlStatus", + "version": 1 + }, + { + "id": "dryerOperatingState", + "version": 1 + }, + { + "id": "demandResponseLoadControl", + "version": 1 + }, + { + "id": "powerConsumptionReport", + "version": 1 + }, + { + "id": "custom.steamClosetOperatingState", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + }, + { + "id": "custom.energyType", + "version": 1 + }, + { + "id": 
"custom.steamClosetWrinklePrevent", + "version": 1 + }, + { + "id": "custom.jobBeginningStatus", + "version": 1 + }, + { + "id": "custom.supportedOptions", + "version": 1 + }, + { + "id": "custom.veryFineDustFilter", + "version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.driverVersion", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "samsungce.steamClosetDelayEnd", + "version": 1 + }, + { + "id": "samsungce.steamClosetKeepFreshMode", + "version": 1 + }, + { + "id": "samsungce.steamClosetSanitizeMode", + "version": 1 + }, + { + "id": "samsungce.steamClosetAutoCycleLink", + "version": 1 + }, + { + "id": "samsungce.steamClosetCycle", + "version": 1 + }, + { + "id": "samsungce.steamClosetCyclePreset", + "version": 1 + }, + { + "id": "samsungce.kidsLock", + "version": 1 + }, + { + "id": "samsungce.welcomeMessage", + "version": 1 + }, + { + "id": "samsungce.quickControl", + "version": 1 + }, + { + "id": "sec.diagnosticsInformation", + "version": 1 + }, + { + "id": "sec.wifiConfiguration", + "version": 1 + } + ], + "categories": [ + { + "name": "ClothingCareMachine", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2023-06-23T16:00:40.545Z", + "profile": { + "id": "a3623498-4747-3761-bac1-ba13f437d8ea" + }, + "ocf": { + "ocfDeviceType": "x.com.st.d.steamcloset", + "name": "[airdresser] Samsung", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "res.1.1.0,sh.1.1.0", + "manufacturerName": "Samsung Electronics", + "modelNumber": "DA_DF_TP2_20_COMMON|20299141|3801010200151107020100FF00000000", + "platformVersion": "DAWIT 2.0", + "platformOS": "TizenRT 2.0 + IPv6", + "hwVersion": "MediaTek", + "firmwareVersion": "DA_DF_TP2_20_COMMON_30230807", + "vendorId": "DA-WM-SC-000001", + "vendorResourceClientServerVersion": "MediaTek Release 2.211214.1", + "lastSignupTime": "2023-06-23T16:00:36.793123Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": [], + "executionContext": "CLOUD" + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/hw_q80r_soundbar.json b/tests/components/smartthings/fixtures/devices/hw_q80r_soundbar.json new file mode 100644 index 00000000000..5f99cefddcb --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/hw_q80r_soundbar.json @@ -0,0 +1,106 @@ +{ + "items": [ + { + "deviceId": "afcf3b91-0000-1111-2222-ddff2a0a6577", + "name": "[AV] Samsung Soundbar Q80R", + "label": "Soundbar", + "manufacturerName": "Samsung Electronics", + "presentationId": "VD-NetworkAudio-001S", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "c7f8e400-0000-1111-2222-76463f4eb484", + "ownerId": "bd0d9288-0000-1111-2222-68310a42a709", + "roomId": "be09ff51-0000-1111-2222-e48e2dab37fd", + "deviceTypeName": "Samsung OCF Network Audio Player", + "components": [ + { + "id": "main", + "label": "Soundbar", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "audioVolume", + "version": 1 + }, + { + "id": "audioMute", + "version": 1 + }, + { + "id": "audioTrackData", + "version": 1 + }, + { + "id": "mediaInputSource", + "version": 1 + }, + { + "id": "samsungvd.audioInputSource", + "version": 1 + }, + { + "id": "mediaPlayback", + "version": 1 + }, + { + "id": "audioNotification", + "version": 1 + }, + { + "id": 
"samsungvd.groupInfo", + "version": 1 + } + ], + "categories": [ + { + "name": "NetworkAudio", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2020-10-19T01:35:08Z", + "profile": { + "id": "c1036d88-000-1111-2222-a361463fd53f" + }, + "ocf": { + "ocfDeviceType": "oic.d.networkaudio", + "name": "[AV] Samsung Soundbar Q80R", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "res.1.1.0,sh.1.1.0", + "manufacturerName": "Samsung Electronics", + "modelNumber": "Q80R", + "platformVersion": "Tizen 4.0", + "platformOS": "4.1.10", + "hwVersion": "0-0", + "firmwareVersion": "HW-Q80RWWB-1012.6", + "vendorId": "VD-NetworkAudio-001S", + "vendorResourceClientServerVersion": "1.2", + "locale": "KO", + "lastSignupTime": "2021-01-16T07:05:02.184545Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/vd_sensor_light_2023.json b/tests/components/smartthings/fixtures/devices/vd_sensor_light_2023.json new file mode 100644 index 00000000000..ef1dd2e96bc --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/vd_sensor_light_2023.json @@ -0,0 +1,81 @@ +{ + "items": [ + { + "deviceId": "5cc1c096-98b9-460c-8f1c-1045509ec605", + "name": "VD-Sensor.Light-2023", + "label": "Light Sensor - 55\" The Frame", + "manufacturerName": "Samsung Electronics", + "presentationId": "VD-Sensor.Light-2023", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "df59873c-4e2c-43ba-bcd4-ade4efb0504a", + "ownerId": "71254e90-c144-45b6-aabe-709f78f48376", + "roomId": "8a4fac38-48d1-4a8c-922b-92620442363b", + "deviceTypeName": "x.com.st.d.sensor.light", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "relativeBrightness", + "version": 1 + }, + { + "id": "samsungvd.deviceCategory", + "version": 1 + } + ], + "categories": [ + { + "name": "LightSensor", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2024-11-15T22:21:27.908Z", + "parentDeviceId": "425ac77a-f7c9-a62d-ff12-cdad144952e3", + "profile": { + "id": "5f1633fb-0c63-34d3-9d04-a314d393d225" + }, + "ocf": { + "ocfDeviceType": "x.com.st.d.sensor.light", + "name": "Light Sensor - 55 The Frame", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "res.1.1.0,sh.1.1.0", + "manufacturerName": "Samsung Electronics", + "modelNumber": "QE55LS03DAUXXN", + "platformVersion": "8.0", + "platformOS": "Tizen", + "hwVersion": "", + "firmwareVersion": "latest", + "vendorId": "VD-Sensor.Light-2023", + "vendorResourceClientServerVersion": "4.0.26", + "lastSignupTime": "2024-11-15T22:21:27.933740026Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": [], + "executionContext": "CLOUD" + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/snapshots/test_binary_sensor.ambr b/tests/components/smartthings/snapshots/test_binary_sensor.ambr index 9bb52a71eee..d6a5ac6a4e7 100644 --- a/tests/components/smartthings/snapshots/test_binary_sensor.ambr +++ b/tests/components/smartthings/snapshots/test_binary_sensor.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 
'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd.motion', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_motionSensor_motion_motion', 'unit_of_measurement': None, }) # --- @@ -77,7 +77,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd.sound', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_soundSensor_sound_sound', 'unit_of_measurement': None, }) # --- @@ -125,7 +125,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6.contact', + 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6_main_contactSensor_contact_contact', 'unit_of_measurement': None, }) # --- @@ -143,6 +143,54 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_ks_cooktop_31001][binary_sensor.induction_hob_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.induction_hob_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '808dbd84-f357-47e2-a0cd-3b66fa22d584_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_cooktop_31001][binary_sensor.induction_hob_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Induction Hob Power', + }), + 'context': , + 'entity_id': 'binary_sensor.induction_hob_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_child_lock-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -173,7 +221,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'child_lock', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.lockState', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_samsungce.kidsLock_lockState_lockState', 'unit_of_measurement': None, }) # --- @@ -220,7 +268,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'door', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.doorState', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_samsungce.doorState_doorState_doorState', 'unit_of_measurement': None, }) # --- @@ -238,6 +286,54 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.microwave_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': None, + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Microwave Power', + }), + 'context': , + 'entity_id': 'binary_sensor.microwave_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_remote_control-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -268,7 +364,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remote_control', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.remoteControlEnabled', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', 'unit_of_measurement': None, }) # --- @@ -315,7 +411,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'child_lock', - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.lockState', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_samsungce.kidsLock_lockState_lockState', 'unit_of_measurement': None, }) # --- @@ -362,7 +458,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'door', - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.doorState', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_samsungce.doorState_doorState_doorState', 'unit_of_measurement': None, }) # --- @@ -410,7 +506,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remote_control', - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.remoteControlEnabled', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', 'unit_of_measurement': None, }) # --- @@ -457,7 +553,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'child_lock', - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.lockState', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_samsungce.kidsLock_lockState_lockState', 'unit_of_measurement': None, }) # --- @@ -504,7 +600,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'door', - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.doorState', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_samsungce.doorState_doorState_doorState', 'unit_of_measurement': None, }) # --- @@ -552,7 +648,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remote_control', - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.remoteControlEnabled', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', 'unit_of_measurement': None, }) # --- @@ -599,7 +695,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cooler_door', - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_cooler_contactSensor_contact', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_cooler_contactSensor_contact_contact', 'unit_of_measurement': None, }) # --- @@ -647,7 +743,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.contact', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_contactSensor_contact_contact', 'unit_of_measurement': None, }) # --- @@ -695,7 +791,7 @@ 
'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'freezer_door', - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_freezer_contactSensor_contact', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_freezer_contactSensor_contact_contact', 'unit_of_measurement': None, }) # --- @@ -743,7 +839,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'child_lock', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.lockState', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_samsungce.kidsLock_lockState_lockState', 'unit_of_measurement': None, }) # --- @@ -760,6 +856,54 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.dishwasher_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dishwasher Power', + }), + 'context': , + 'entity_id': 'binary_sensor.dishwasher_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_remote_control-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -790,7 +934,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remote_control', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.remoteControlEnabled', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', 'unit_of_measurement': None, }) # --- @@ -807,6 +951,148 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.airdresser_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirDresser Child lock', + }), + 'context': , + 'entity_id': 
'binary_sensor.airdresser_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.airdresser_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'AirDresser Power', + }), + 'context': , + 'entity_id': 'binary_sensor.airdresser_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.airdresser_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirDresser Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.airdresser_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_child_lock-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -837,7 +1123,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'child_lock', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.lockState', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_samsungce.kidsLock_lockState_lockState', 'unit_of_measurement': None, }) # --- @@ -884,7 +1170,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.switch', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -932,7 +1218,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remote_control', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.remoteControlEnabled', + 
'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', 'unit_of_measurement': None, }) # --- @@ -949,6 +1235,53 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_wrinkle_prevent_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.dryer_wrinkle_prevent_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wrinkle prevent active', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dryer_wrinkle_prevent_active', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_custom.dryerWrinklePrevent_operatingState_operatingState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_wrinkle_prevent_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dryer Wrinkle prevent active', + }), + 'context': , + 'entity_id': 'binary_sensor.dryer_wrinkle_prevent_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_child_lock-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -979,7 +1312,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'child_lock', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.lockState', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_samsungce.kidsLock_lockState_lockState', 'unit_of_measurement': None, }) # --- @@ -1026,7 +1359,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.switch', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -1074,7 +1407,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remote_control', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.remoteControlEnabled', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', 'unit_of_measurement': None, }) # --- @@ -1091,6 +1424,53 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_wrinkle_prevent_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.seca_roupa_wrinkle_prevent_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wrinkle prevent active', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dryer_wrinkle_prevent_active', + 'unique_id': 
'3a6c4e05-811d-5041-e956-3d04c424cbcd_main_custom.dryerWrinklePrevent_operatingState_operatingState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_wrinkle_prevent_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Seca-Roupa Wrinkle prevent active', + }), + 'context': , + 'entity_id': 'binary_sensor.seca_roupa_wrinkle_prevent_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[da_wm_wm_000001][binary_sensor.washer_child_lock-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1121,7 +1501,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'child_lock', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.lockState', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_samsungce.kidsLock_lockState_lockState', 'unit_of_measurement': None, }) # --- @@ -1168,7 +1548,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.switch', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -1216,7 +1596,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remote_control', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.remoteControlEnabled', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', 'unit_of_measurement': None, }) # --- @@ -1263,7 +1643,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'child_lock', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.lockState', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_samsungce.kidsLock_lockState_lockState', 'unit_of_measurement': None, }) # --- @@ -1310,7 +1690,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.switch', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -1358,7 +1738,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remote_control', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.remoteControlEnabled', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', 'unit_of_measurement': None, }) # --- @@ -1405,7 +1785,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89.motion', + 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89_main_motionSensor_motion_motion', 'unit_of_measurement': None, }) # --- @@ -1453,7 +1833,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89.presence', + 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89_main_presenceSensor_presence_presence', 'unit_of_measurement': None, }) # --- @@ -1501,7 +1881,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '184c67cc-69e2-44b6-8f73-55c963068ad9.presence', + 'unique_id': '184c67cc-69e2-44b6-8f73-55c963068ad9_main_presenceSensor_presence_presence', 'unit_of_measurement': None, }) # --- @@ -1549,7 +1929,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 
'7d246592-93db-4d72-a10d-5a51793ece8c.contact', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_contactSensor_contact_contact', 'unit_of_measurement': None, }) # --- @@ -1597,7 +1977,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'acceleration', - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c.acceleration', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_accelerationSensor_acceleration_acceleration', 'unit_of_measurement': None, }) # --- @@ -1645,7 +2025,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'valve', - 'unique_id': '612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3.valve', + 'unique_id': '612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3_main_valve_valve_valve', 'unit_of_measurement': None, }) # --- @@ -1693,7 +2073,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a2a6018b-2663-4727-9d1d-8f56953b5116.water', + 'unique_id': 'a2a6018b-2663-4727-9d1d-8f56953b5116_main_waterSensor_water_water', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_button.ambr b/tests/components/smartthings/snapshots/test_button.ambr index a16ad794929..2c9dbd008af 100644 --- a/tests/components/smartthings/snapshots/test_button.ambr +++ b/tests/components/smartthings/snapshots/test_button.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'stop', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_stop', 'unit_of_measurement': None, }) # --- @@ -76,7 +76,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'stop', - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenOperatingState', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenOperatingState_stop', 'unit_of_measurement': None, }) # --- @@ -123,7 +123,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'stop', - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenOperatingState', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenOperatingState_stop', 'unit_of_measurement': None, }) # --- @@ -140,3 +140,50 @@ 'state': 'unknown', }) # --- +# name: test_all_entities[da_ref_normal_000001][button.refrigerator_reset_water_filter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.refrigerator_reset_water_filter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset water filter', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_water_filter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_custom.waterFilter_resetWaterFilter', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ref_normal_000001][button.refrigerator_reset_water_filter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Refrigerator Reset water filter', + }), + 'context': , + 'entity_id': 'button.refrigerator_reset_water_filter', + 'last_changed': , + 'last_reported': , + 'last_updated': 
, + 'state': 'unknown', + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_climate.ambr b/tests/components/smartthings/snapshots/test_climate.ambr index 893093ee2aa..10e9dbd5489 100644 --- a/tests/components/smartthings/snapshots/test_climate.ambr +++ b/tests/components/smartthings/snapshots/test_climate.ambr @@ -36,7 +36,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'bf53a150-f8a4-45d1-aac4-86252475d551', + 'unique_id': 'bf53a150-f8a4-45d1-aac4-86252475d551_main', 'unit_of_measurement': None, }) # --- @@ -99,7 +99,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5', + 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5_main', 'unit_of_measurement': None, }) # --- @@ -178,7 +178,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main', 'unit_of_measurement': None, }) # --- @@ -283,7 +283,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main', 'unit_of_measurement': None, }) # --- @@ -383,7 +383,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000', + 'unique_id': 'F8042E25-0E53-0000-0000-000000000000_main', 'unit_of_measurement': None, }) # --- @@ -461,7 +461,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc', + 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc_main', 'unit_of_measurement': None, }) # --- @@ -532,7 +532,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db', + 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db_main', 'unit_of_measurement': None, }) # --- @@ -595,7 +595,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main', 'unit_of_measurement': None, }) # --- @@ -657,7 +657,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '69a271f6-6537-4982-8cd9-979866872692', + 'unique_id': '69a271f6-6537-4982-8cd9-979866872692_main', 'unit_of_measurement': None, }) # --- @@ -723,7 +723,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6', + 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6_main', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_cover.ambr b/tests/components/smartthings/snapshots/test_cover.ambr index 6877a8ccc01..4b5cf705665 100644 --- a/tests/components/smartthings/snapshots/test_cover.ambr +++ b/tests/components/smartthings/snapshots/test_cover.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '571af102-15db-4030-b76b-245a691f74a5', + 'unique_id': '571af102-15db-4030-b76b-245a691f74a5_main', 'unit_of_measurement': None, }) # --- @@ -79,7 +79,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '71afed1c-006d-4e48-b16e-e7f88f9fd638', + 'unique_id': 
'71afed1c-006d-4e48-b16e-e7f88f9fd638_main', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_fan.ambr b/tests/components/smartthings/snapshots/test_fan.ambr index 40ab7b12267..1196118b3b5 100644 --- a/tests/components/smartthings/snapshots/test_fan.ambr +++ b/tests/components/smartthings/snapshots/test_fan.ambr @@ -37,7 +37,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'f1af21a2-d5a1-437c-b10a-b34a87394b71', + 'unique_id': 'f1af21a2-d5a1-437c-b10a-b34a87394b71_main', 'unit_of_measurement': None, }) # --- @@ -97,7 +97,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '6d95a8b7-4ee3-429a-a13a-00ec9354170c', + 'unique_id': '6d95a8b7-4ee3-429a-a13a-00ec9354170c_main', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_init.ambr b/tests/components/smartthings/snapshots/test_init.ambr index d6e98553015..6a402182b82 100644 --- a/tests/components/smartthings/snapshots/test_init.ambr +++ b/tests/components/smartthings/snapshots/test_init.ambr @@ -431,6 +431,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[da_ks_cooktop_31001] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '808dbd84-f357-47e2-a0cd-3b66fa22d584', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': None, + 'model': None, + 'model_id': None, + 'name': 'Induction Hob', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- # name: test_devices[da_ks_microwave_0101x] DeviceRegistryEntrySnapshot({ 'area_id': 'theater', @@ -662,6 +695,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[da_wm_sc_000001] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 'MediaTek', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'DA_DF_TP2_20_COMMON', + 'model_id': None, + 'name': 'AirDresser', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'DA_DF_TP2_20_COMMON_30230807', + 'via_device_id': None, + }) +# --- # name: test_devices[da_wm_wd_000001] DeviceRegistryEntrySnapshot({ 'area_id': 'theater', @@ -1157,6 +1223,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[hw_q80r_soundbar] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '0-0', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + 'afcf3b91-0000-1111-2222-ddff2a0a6577', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'Q80R', + 'model_id': None, + 'name': 'Soundbar', + 'name_by_user': None, + 
'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'HW-Q80RWWB-1012.6', + 'via_device_id': None, + }) +# --- # name: test_devices[ikea_kadrilj] DeviceRegistryEntrySnapshot({ 'area_id': None, @@ -1454,6 +1553,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[vd_sensor_light_2023] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '5cc1c096-98b9-460c-8f1c-1045509ec605', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'QE55LS03DAUXXN', + 'model_id': None, + 'name': 'Light Sensor - 55" The Frame', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'latest', + 'via_device_id': None, + }) +# --- # name: test_devices[vd_stv_2017_k] DeviceRegistryEntrySnapshot({ 'area_id': 'theater', diff --git a/tests/components/smartthings/snapshots/test_light.ambr b/tests/components/smartthings/snapshots/test_light.ambr index f1f2b92de77..6826a555f6a 100644 --- a/tests/components/smartthings/snapshots/test_light.ambr +++ b/tests/components/smartthings/snapshots/test_light.ambr @@ -37,7 +37,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '7c16163e-c94e-482f-95f6-139ae0cd9d5e', + 'unique_id': '7c16163e-c94e-482f-95f6-139ae0cd9d5e_main', 'unit_of_measurement': None, }) # --- @@ -103,7 +103,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad', + 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad_main', 'unit_of_measurement': None, }) # --- @@ -160,7 +160,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'aaedaf28-2ae0-4c1d-b57e-87f6a420c298', + 'unique_id': 'aaedaf28-2ae0-4c1d-b57e-87f6a420c298_main', 'unit_of_measurement': None, }) # --- @@ -221,7 +221,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '440063de-a200-40b5-8a6b-f3399eaa0370', + 'unique_id': '440063de-a200-40b5-8a6b-f3399eaa0370_main', 'unit_of_measurement': None, }) # --- @@ -302,7 +302,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'cb958955-b015-498c-9e62-fc0c51abd054', + 'unique_id': 'cb958955-b015-498c-9e62-fc0c51abd054_main', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_lock.ambr b/tests/components/smartthings/snapshots/test_lock.ambr index 2cf9688c3dd..325ce0cc677 100644 --- a/tests/components/smartthings/snapshots/test_lock.ambr +++ b/tests/components/smartthings/snapshots/test_lock.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158', + 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158_main', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_media_player.ambr b/tests/components/smartthings/snapshots/test_media_player.ambr new file mode 100644 index 00000000000..83f9d19b9fa --- /dev/null +++ b/tests/components/smartthings/snapshots/test_media_player.ambr @@ -0,0 +1,298 @@ +# serializer version: 1 +# name: 
test_all_entities[hw_q80r_soundbar][media_player.soundbar-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'source_list': list([ + 'wifi', + 'bluetooth', + 'HDMI1', + 'HDMI2', + 'digital', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.soundbar', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'afcf3b91-0000-1111-2222-ddff2a0a6577_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][media_player.soundbar-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Soundbar', + 'is_volume_muted': False, + 'media_artist': 'Rick Astley', + 'media_title': 'Never Gonna Give You Up', + 'source': 'wifi', + 'source_list': list([ + 'wifi', + 'bluetooth', + 'HDMI1', + 'HDMI2', + 'digital', + ]), + 'supported_features': , + 'volume_level': 0.01, + }), + 'context': , + 'entity_id': 'media_player.soundbar', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_all_entities[im_speaker_ai_0001][media_player.galaxy_home_mini-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.galaxy_home_mini', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[im_speaker_ai_0001][media_player.galaxy_home_mini-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Galaxy Home Mini', + 'is_volume_muted': False, + 'repeat': , + 'shuffle': False, + 'supported_features': , + 'volume_level': 0.52, + }), + 'context': , + 'entity_id': 'media_player.galaxy_home_mini', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_all_entities[sonos_player][media_player.elliots_rum-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.elliots_rum', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 
'unique_id': 'c85fced9-c474-4a47-93c2-037cc7829536_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sonos_player][media_player.elliots_rum-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Elliots Rum', + 'is_volume_muted': False, + 'media_artist': 'David Guetta', + 'media_title': 'Forever Young', + 'supported_features': , + 'volume_level': 0.15, + }), + 'context': , + 'entity_id': 'media_player.elliots_rum', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_all_entities[vd_network_audio_002s][media_player.soundbar_living-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.soundbar_living', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[vd_network_audio_002s][media_player.soundbar_living-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Soundbar Living', + 'is_volume_muted': False, + 'media_artist': '', + 'media_title': '', + 'source': 'HDMI1', + 'supported_features': , + 'volume_level': 0.17, + }), + 'context': , + 'entity_id': 'media_player.soundbar_living', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[vd_stv_2017_k][media_player.tv_samsung_8_series_49-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'source_list': list([ + 'digitalTv', + 'HDMI1', + 'HDMI4', + 'HDMI4', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.tv_samsung_8_series_49', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[vd_stv_2017_k][media_player.tv_samsung_8_series_49-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tv', + 'friendly_name': '[TV] Samsung 8 Series (49)', + 'is_volume_muted': True, + 'source': 'HDMI1', + 'source_list': list([ + 'digitalTv', + 'HDMI1', + 'HDMI4', + 'HDMI4', + ]), + 'supported_features': , + 'volume_level': 0.13, + }), + 'context': , + 'entity_id': 'media_player.tv_samsung_8_series_49', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_number.ambr b/tests/components/smartthings/snapshots/test_number.ambr index 
18d0a775c95..66aade5b958 100644 --- a/tests/components/smartthings/snapshots/test_number.ambr +++ b/tests/components/smartthings/snapshots/test_number.ambr @@ -7,7 +7,7 @@ 'capabilities': dict({ 'max': 5, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, }), 'config_entry_id': , @@ -34,7 +34,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'washer_rinse_cycles', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_custom.washerRinseCycles', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_custom.washerRinseCycles_washerRinseCycles_washerRinseCycles', 'unit_of_measurement': 'cycles', }) # --- @@ -44,7 +44,7 @@ 'friendly_name': 'Washer Rinse cycles', 'max': 5, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, 'unit_of_measurement': 'cycles', }), @@ -64,7 +64,7 @@ 'capabilities': dict({ 'max': 5, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, }), 'config_entry_id': , @@ -91,7 +91,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'washer_rinse_cycles', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_custom.washerRinseCycles', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_custom.washerRinseCycles_washerRinseCycles_washerRinseCycles', 'unit_of_measurement': 'cycles', }) # --- @@ -101,7 +101,7 @@ 'friendly_name': 'Washing Machine Rinse cycles', 'max': 5, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, 'unit_of_measurement': 'cycles', }), diff --git a/tests/components/smartthings/snapshots/test_select.ambr b/tests/components/smartthings/snapshots/test_select.ambr index 649e876bb9e..06185e09547 100644 --- a/tests/components/smartthings/snapshots/test_select.ambr +++ b/tests/components/smartthings/snapshots/test_select.ambr @@ -1,4 +1,120 @@ # serializer version: 1 +# name: test_all_entities[da_wm_dw_000001][select.dishwasher-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.dishwasher', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operating_state', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_dw_000001][select.dishwasher-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher', + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'context': , + 'entity_id': 'select.dishwasher', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][select.airdresser-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.airdresser', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operating_state', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_dryerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][select.airdresser-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirDresser', + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'context': , + 'entity_id': 'select.airdresser', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- # name: test_all_entities[da_wm_wd_000001][select.dryer-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -35,7 +151,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'operating_state', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -93,7 +209,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'operating_state', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_dryerOperatingState', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_dryerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -151,7 +267,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'operating_state', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -209,7 +325,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'operating_state', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_washerOperatingState', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_washerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_sensor.ambr b/tests/components/smartthings/snapshots/test_sensor.ambr index 8656d12c955..416a3d15947 100644 --- a/tests/components/smartthings/snapshots/test_sensor.ambr +++ b/tests/components/smartthings/snapshots/test_sensor.ambr @@ -31,7 +31,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71.energy', + 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71_main_energyMeter_energy_energy', 'unit_of_measurement': 'kWh', }) # --- @@ -83,7 +83,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71.power', + 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71_main_powerMeter_power_power', 'unit_of_measurement': 'W', }) # --- @@ -135,7 +135,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71.voltage', + 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71_main_voltageMeasurement_voltage_voltage', 'unit_of_measurement': None, }) # --- @@ -186,7 +186,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'bf53a150-f8a4-45d1-aac4-86252475d551.temperature', + 
'unique_id': 'bf53a150-f8a4-45d1-aac4-86252475d551_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -238,7 +238,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '68e786a6-7f61-4c3a-9e13-70b803cf782b.energy', + 'unique_id': '68e786a6-7f61-4c3a-9e13-70b803cf782b_main_energyMeter_energy_energy', 'unit_of_measurement': 'kWh', }) # --- @@ -290,7 +290,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '68e786a6-7f61-4c3a-9e13-70b803cf782b.power', + 'unique_id': '68e786a6-7f61-4c3a-9e13-70b803cf782b_main_powerMeter_power_power', 'unit_of_measurement': 'W', }) # --- @@ -340,7 +340,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5.battery', + 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -391,7 +391,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5.temperature', + 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -448,7 +448,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'alarm', - 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd.alarm', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_alarm_alarm_alarm', 'unit_of_measurement': None, }) # --- @@ -502,7 +502,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd.battery', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -553,7 +553,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad.power', + 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad_main_powerMeter_power_power', 'unit_of_measurement': 'W', }) # --- @@ -603,7 +603,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6.battery', + 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -654,7 +654,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6.temperature', + 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -706,7 +706,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'air_quality', - 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666.airQuality', + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_airQualitySensor_airQuality_airQuality', 'unit_of_measurement': 'CAQI', }) # --- @@ -757,7 +757,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666.carbonDioxide', + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_carbonDioxideMeasurement_carbonDioxide_carbonDioxide', 'unit_of_measurement': 'ppm', }) # --- @@ -809,7 +809,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666.humidity', + 'unique_id': 
'a3a970ea-e09c-9c04-161b-94c934e21666_main_relativeHumidityMeasurement_humidity_humidity', 'unit_of_measurement': '%', }) # --- @@ -859,7 +859,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'odor_sensor', - 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666.odorLevel', + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_odorSensor_odorLevel_odorLevel', 'unit_of_measurement': None, }) # --- @@ -908,7 +908,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666.veryFineDustLevel', + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_veryFineDustSensor_veryFineDustLevel_veryFineDustLevel', 'unit_of_measurement': 'µg/m³', }) # --- @@ -960,7 +960,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666.dustLevel', + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_dustSensor_dustLevel_dustLevel', 'unit_of_measurement': 'µg/m³', }) # --- @@ -1012,7 +1012,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666.fineDustLevel', + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_dustSensor_fineDustLevel_fineDustLevel', 'unit_of_measurement': 'µg/m³', }) # --- @@ -1064,7 +1064,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666.temperature', + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -1119,7 +1119,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.energy_meter', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -1174,7 +1174,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.deltaEnergy_meter', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -1229,7 +1229,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.energySaved_meter', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -1281,7 +1281,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.humidity', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_relativeHumidityMeasurement_humidity_humidity', 'unit_of_measurement': '%', }) # --- @@ -1336,7 +1336,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.power_meter', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -1393,7 +1393,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.powerEnergy_meter', + 'unique_id': 
'96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -1445,7 +1445,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.temperature', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -1495,7 +1495,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'audio_volume', - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.volume', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_audioVolume_volume_volume', 'unit_of_measurement': '%', }) # --- @@ -1548,7 +1548,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.energy_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -1603,7 +1603,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.deltaEnergy_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -1658,7 +1658,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.energySaved_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -1710,7 +1710,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.humidity', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_relativeHumidityMeasurement_humidity_humidity', 'unit_of_measurement': '%', }) # --- @@ -1765,7 +1765,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.power_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -1822,7 +1822,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.powerEnergy_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -1874,7 +1874,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.temperature', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -1924,7 +1924,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'audio_volume', - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.volume', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_audioVolume_volume_volume', 'unit_of_measurement': '%', }) # --- @@ -1974,7 +1974,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'air_quality', - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000.airQuality', + 'unique_id': 
'F8042E25-0E53-0000-0000-000000000000_main_airQualitySensor_airQuality_airQuality', 'unit_of_measurement': 'CAQI', }) # --- @@ -2025,7 +2025,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000.dustLevel', + 'unique_id': 'F8042E25-0E53-0000-0000-000000000000_main_dustSensor_dustLevel_dustLevel', 'unit_of_measurement': 'µg/m³', }) # --- @@ -2077,7 +2077,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000.fineDustLevel', + 'unique_id': 'F8042E25-0E53-0000-0000-000000000000_main_dustSensor_fineDustLevel_fineDustLevel', 'unit_of_measurement': 'µg/m³', }) # --- @@ -2129,7 +2129,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000.temperature', + 'unique_id': 'F8042E25-0E53-0000-0000-000000000000_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -2179,7 +2179,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.completionTime', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -2247,7 +2247,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_job_state', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.ovenJobState', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_ovenJobState_ovenJobState', 'unit_of_measurement': None, }) # --- @@ -2320,7 +2320,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_machine_state', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.machineState', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -2401,7 +2401,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_mode', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.ovenMode', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenMode_ovenMode_ovenMode', 'unit_of_measurement': None, }) # --- @@ -2476,7 +2476,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_setpoint', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.ovenSetpoint', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenSetpoint_ovenSetpoint_ovenSetpoint', 'unit_of_measurement': , }) # --- @@ -2527,7 +2527,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.temperature', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -2577,7 +2577,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.completionTime', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -2645,7 +2645,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_job_state', - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.ovenJobState', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenOperatingState_ovenJobState_ovenJobState', 
'unit_of_measurement': None, }) # --- @@ -2718,7 +2718,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_machine_state', - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.machineState', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -2799,7 +2799,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_mode', - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.ovenMode', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenMode_ovenMode_ovenMode', 'unit_of_measurement': None, }) # --- @@ -2874,7 +2874,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_setpoint', - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.ovenSetpoint', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenSetpoint_ovenSetpoint_ovenSetpoint', 'unit_of_measurement': , }) # --- @@ -2925,7 +2925,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.temperature', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -2975,7 +2975,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.completionTime', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -3043,7 +3043,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_job_state', - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.ovenJobState', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenOperatingState_ovenJobState_ovenJobState', 'unit_of_measurement': None, }) # --- @@ -3116,7 +3116,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_machine_state', - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.machineState', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -3197,7 +3197,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_mode', - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.ovenMode', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenMode_ovenMode_ovenMode', 'unit_of_measurement': None, }) # --- @@ -3272,7 +3272,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_setpoint', - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.ovenSetpoint', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenSetpoint_ovenSetpoint_ovenSetpoint', 'unit_of_measurement': , }) # --- @@ -3323,7 +3323,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.temperature', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -3378,7 +3378,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.energy_meter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -3433,7 +3433,7 @@ 'previous_unique_id': None, 
'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.deltaEnergy_meter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -3488,7 +3488,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.energySaved_meter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -3543,7 +3543,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.power_meter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -3600,7 +3600,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.powerEnergy_meter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -3650,7 +3650,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44.battery', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -3708,7 +3708,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'robot_cleaner_cleaning_mode', - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44.robotCleanerCleaningMode', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_robotCleanerCleaningMode_robotCleanerCleaningMode_robotCleanerCleaningMode', 'unit_of_measurement': None, }) # --- @@ -3777,7 +3777,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'robot_cleaner_movement', - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44.robotCleanerMovement', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_robotCleanerMovement_robotCleanerMovement_robotCleanerMovement', 'unit_of_measurement': None, }) # --- @@ -3844,7 +3844,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'robot_cleaner_turbo_mode', - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44.robotCleanerTurboMode', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_robotCleanerTurboMode_robotCleanerTurboMode_robotCleanerTurboMode', 'unit_of_measurement': None, }) # --- @@ -3898,7 +3898,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'thermostat_cooling_setpoint', - 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100.coolingSetpoint', + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_thermostatCoolingSetpoint_coolingSetpoint_coolingSetpoint', 'unit_of_measurement': , }) # --- @@ -3952,7 +3952,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100.energy_meter', + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -4007,7 +4007,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100.deltaEnergy_meter', + 'unique_id': 
'1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -4062,7 +4062,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100.energySaved_meter', + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -4117,7 +4117,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100.power_meter', + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -4174,7 +4174,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100.powerEnergy_meter', + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -4226,7 +4226,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100.temperature', + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -4276,7 +4276,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.completionTime', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -4329,7 +4329,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.energy_meter', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -4384,7 +4384,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.deltaEnergy_meter', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -4439,7 +4439,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.energySaved_meter', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -4502,7 +4502,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dishwasher_job_state', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.dishwasherJobState', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_dishwasherJobState_dishwasherJobState', 'unit_of_measurement': None, }) # --- @@ -4568,7 +4568,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dishwasher_machine_state', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.machineState', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -4626,7 +4626,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 
'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.power_meter', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -4683,7 +4683,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.powerEnergy_meter', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -4703,6 +4703,473 @@ 'state': '0.0', }) # --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_completion_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_completion_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Completion time', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'completion_time', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_dryerOperatingState_completionTime_completionTime', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_completion_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'AirDresser Completion time', + }), + 'context': , + 'entity_id': 'sensor.airdresser_completion_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2025-02-11T09:00:17+00:00', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_energy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'AirDresser Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airdresser_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '207.5', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy_difference-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_energy_difference', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy difference', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_difference', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy_difference-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'AirDresser Energy difference', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airdresser_energy_difference', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy_saved-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_energy_saved', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy saved', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_saved', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_energySaved_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy_saved-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'AirDresser Energy saved', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airdresser_energy_saved', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_job_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'cooling', + 'delay_wash', + 'drying', + 'finished', + 'none', + 'refreshing', + 'weight_sensing', + 'wrinkle_prevent', + 'dehumidifying', + 'ai_drying', + 'sanitizing', + 'internal_care', + 'freeze_protection', + 'continuous_dehumidifying', + 'thawing_frozen_inside', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_job_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Job state', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'dryer_job_state', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_dryerOperatingState_dryerJobState_dryerJobState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_job_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'AirDresser Job state', + 'options': list([ + 'cooling', + 'delay_wash', + 'drying', + 'finished', + 'none', + 'refreshing', + 'weight_sensing', + 'wrinkle_prevent', + 'dehumidifying', + 'ai_drying', + 'sanitizing', + 'internal_care', + 'freeze_protection', + 'continuous_dehumidifying', + 'thawing_frozen_inside', + ]), + }), + 'context': , + 'entity_id': 'sensor.airdresser_job_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'none', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_machine_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'pause', + 'run', + 'stop', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_machine_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Machine state', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dryer_machine_state', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_dryerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_machine_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'AirDresser Machine state', + 'options': list([ + 'pause', + 'run', + 'stop', + ]), + }), + 'context': , + 'entity_id': 'sensor.airdresser_machine_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_power_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'AirDresser Power', + 'power_consumption_end': '2025-02-11T08:21:17Z', + 'power_consumption_start': '2025-02-10T22:51:59Z', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 
'entity_id': 'sensor.airdresser_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_power_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_power_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_energy', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_power_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'AirDresser Power energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airdresser_power_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- # name: test_all_entities[da_wm_wd_000001][sensor.dryer_completion_time-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -4733,7 +5200,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.completionTime', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -4786,7 +5253,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.energy_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -4841,7 +5308,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.deltaEnergy_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -4896,7 +5363,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.energySaved_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -4964,7 +5431,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dryer_job_state', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.dryerJobState', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_dryerJobState_dryerJobState', 'unit_of_measurement': None, }) # --- @@ -5035,7 +5502,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dryer_machine_state', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.machineState', + 'unique_id': 
'02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -5093,7 +5560,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.power_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -5150,7 +5617,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.powerEnergy_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -5200,7 +5667,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.completionTime', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_dryerOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -5253,7 +5720,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.energy_meter', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -5308,7 +5775,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.deltaEnergy_meter', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -5363,7 +5830,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.energySaved_meter', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -5431,7 +5898,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dryer_job_state', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.dryerJobState', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_dryerOperatingState_dryerJobState_dryerJobState', 'unit_of_measurement': None, }) # --- @@ -5502,7 +5969,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dryer_machine_state', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.machineState', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_dryerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -5560,7 +6027,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.power_meter', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -5617,7 +6084,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.powerEnergy_meter', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -5667,7 +6134,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': 
'f984b91d-f250-9d42-3436-33f09a422a47.completionTime', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -5720,7 +6187,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.energy_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -5775,7 +6242,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.deltaEnergy_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -5830,7 +6297,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.energySaved_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -5899,7 +6366,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'washer_job_state', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.washerJobState', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_washerJobState_washerJobState', 'unit_of_measurement': None, }) # --- @@ -5971,7 +6438,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'washer_machine_state', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.machineState', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -6029,7 +6496,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.power_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -6086,7 +6553,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.powerEnergy_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -6136,7 +6603,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.completionTime', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_washerOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -6189,7 +6656,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.energy_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -6244,7 +6711,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.deltaEnergy_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -6299,7 +6766,7 @@ 'previous_unique_id': None, 
'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.energySaved_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -6368,7 +6835,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'washer_job_state', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.washerJobState', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_washerOperatingState_washerJobState_washerJobState', 'unit_of_measurement': None, }) # --- @@ -6440,7 +6907,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'washer_machine_state', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.machineState', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_washerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -6498,7 +6965,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.power_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -6555,7 +7022,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.powerEnergy_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -6607,7 +7074,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89.temperature', + 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -6659,7 +7126,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc.humidity', + 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc_main_relativeHumidityMeasurement_humidity_humidity', 'unit_of_measurement': '%', }) # --- @@ -6711,7 +7178,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc.temperature', + 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -6763,7 +7230,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db.humidity', + 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db_main_relativeHumidityMeasurement_humidity_humidity', 'unit_of_measurement': '%', }) # --- @@ -6815,7 +7282,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db.temperature', + 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': None, }) # --- @@ -6866,7 +7333,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'link_quality', - 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a.lqi', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main_signalStrength_lqi_lqi', 'unit_of_measurement': None, }) # --- @@ -6916,7 +7383,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 
'656569c2-7976-4232-a789-34b4d1176c3a.rssi', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main_signalStrength_rssi_rssi', 'unit_of_measurement': 'dBm', }) # --- @@ -6968,7 +7435,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a.temperature', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -7018,7 +7485,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a.battery', + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -7069,7 +7536,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '69a271f6-6537-4982-8cd9-979866872692.energy', + 'unique_id': '69a271f6-6537-4982-8cd9-979866872692_main_energyMeter_energy_energy', 'unit_of_measurement': 'kWh', }) # --- @@ -7121,7 +7588,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '69a271f6-6537-4982-8cd9-979866872692.power', + 'unique_id': '69a271f6-6537-4982-8cd9-979866872692_main_powerMeter_power_power', 'unit_of_measurement': 'W', }) # --- @@ -7173,7 +7640,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '69a271f6-6537-4982-8cd9-979866872692.temperature', + 'unique_id': '69a271f6-6537-4982-8cd9-979866872692_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -7193,6 +7660,182 @@ 'state': '19.0', }) # --- +# name: test_all_entities[hw_q80r_soundbar][sensor.soundbar_media_input_source-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'wifi', + 'bluetooth', + 'hdmi1', + 'hdmi2', + 'digital', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.soundbar_media_input_source', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Media input source', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'media_input_source', + 'unique_id': 'afcf3b91-0000-1111-2222-ddff2a0a6577_main_mediaInputSource_inputSource_inputSource', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][sensor.soundbar_media_input_source-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Soundbar Media input source', + 'options': list([ + 'wifi', + 'bluetooth', + 'hdmi1', + 'hdmi2', + 'digital', + ]), + }), + 'context': , + 'entity_id': 'sensor.soundbar_media_input_source', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'wifi', + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][sensor.soundbar_media_playback_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'paused', + 'playing', + 'stopped', + 'fast_forwarding', + 'rewinding', + 'buffering', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': 
, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.soundbar_media_playback_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Media playback status', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'media_playback_status', + 'unique_id': 'afcf3b91-0000-1111-2222-ddff2a0a6577_main_mediaPlayback_playbackStatus_playbackStatus', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][sensor.soundbar_media_playback_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Soundbar Media playback status', + 'options': list([ + 'paused', + 'playing', + 'stopped', + 'fast_forwarding', + 'rewinding', + 'buffering', + ]), + }), + 'context': , + 'entity_id': 'sensor.soundbar_media_playback_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][sensor.soundbar_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.soundbar_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'audio_volume', + 'unique_id': 'afcf3b91-0000-1111-2222-ddff2a0a6577_main_audioVolume_volume_volume', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][sensor.soundbar_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Soundbar Volume', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.soundbar_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- # name: test_all_entities[ikea_kadrilj][sensor.kitchen_ikea_kadrilj_window_blind_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -7223,7 +7866,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '71afed1c-006d-4e48-b16e-e7f88f9fd638.battery', + 'unique_id': '71afed1c-006d-4e48-b16e-e7f88f9fd638_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -7272,7 +7915,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media_input_source', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.inputSource', + 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_main_mediaInputSource_inputSource_inputSource', 'unit_of_measurement': None, }) # --- @@ -7320,7 +7963,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media_playback_repeat', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.playbackRepeatMode', + 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_main_mediaPlaybackRepeat_playbackRepeatMode_playbackRepeatMode', 'unit_of_measurement': None, }) # --- @@ -7367,7 +8010,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 
'media_playback_shuffle', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.playbackShuffle', + 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_main_mediaPlaybackShuffle_playbackShuffle_playbackShuffle', 'unit_of_measurement': None, }) # --- @@ -7423,7 +8066,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media_playback_status', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.playbackStatus', + 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_main_mediaPlayback_playbackStatus_playbackStatus', 'unit_of_measurement': None, }) # --- @@ -7479,7 +8122,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'audio_volume', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.volume', + 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_main_audioVolume_volume_volume', 'unit_of_measurement': '%', }) # --- @@ -7527,7 +8170,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c.battery', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -7578,7 +8221,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c.temperature', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -7628,7 +8271,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'x_coordinate', - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c X Coordinate', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_threeAxis_threeAxis_x_coordinate', 'unit_of_measurement': None, }) # --- @@ -7675,7 +8318,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'y_coordinate', - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c Y Coordinate', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_threeAxis_threeAxis_y_coordinate', 'unit_of_measurement': None, }) # --- @@ -7722,7 +8365,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'z_coordinate', - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c Z Coordinate', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_threeAxis_threeAxis_z_coordinate', 'unit_of_measurement': None, }) # --- @@ -7769,7 +8412,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'air_conditioner_mode', - 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5.airConditionerMode', + 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5_main_airConditionerMode_airConditionerMode_airConditionerMode', 'unit_of_measurement': None, }) # --- @@ -7816,7 +8459,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'thermostat_cooling_setpoint', - 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5.coolingSetpoint', + 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5_main_thermostatCoolingSetpoint_coolingSetpoint_coolingSetpoint', 'unit_of_measurement': , }) # --- @@ -7874,7 +8517,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media_playback_status', - 'unique_id': 'c85fced9-c474-4a47-93c2-037cc7829536.playbackStatus', + 'unique_id': 'c85fced9-c474-4a47-93c2-037cc7829536_main_mediaPlayback_playbackStatus_playbackStatus', 'unit_of_measurement': None, }) # --- @@ -7930,7 +8573,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'audio_volume', - 'unique_id': 
'c85fced9-c474-4a47-93c2-037cc7829536.volume', + 'unique_id': 'c85fced9-c474-4a47-93c2-037cc7829536_main_audioVolume_volume_volume', 'unit_of_measurement': '%', }) # --- @@ -7983,7 +8626,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1.energy_meter', + 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -8038,7 +8681,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1.deltaEnergy_meter', + 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -8097,7 +8740,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media_playback_status', - 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac.playbackStatus', + 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac_main_mediaPlayback_playbackStatus_playbackStatus', 'unit_of_measurement': None, }) # --- @@ -8153,7 +8796,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'audio_volume', - 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac.volume', + 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac_main_audioVolume_volume_volume', 'unit_of_measurement': '%', }) # --- @@ -8171,6 +8814,57 @@ 'state': '17', }) # --- +# name: test_all_entities[vd_sensor_light_2023][sensor.light_sensor_55_the_frame_brightness_intensity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.light_sensor_55_the_frame_brightness_intensity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Brightness intensity', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'brightness_intensity', + 'unique_id': '5cc1c096-98b9-460c-8f1c-1045509ec605_main_relativeBrightness_brightnessIntensity_brightnessIntensity', + 'unit_of_measurement': 'level', + }) +# --- +# name: test_all_entities[vd_sensor_light_2023][sensor.light_sensor_55_the_frame_brightness_intensity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Light Sensor - 55" The Frame Brightness intensity', + 'state_class': , + 'unit_of_measurement': 'level', + }), + 'context': , + 'entity_id': 'sensor.light_sensor_55_the_frame_brightness_intensity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- # name: test_all_entities[vd_stv_2017_k][sensor.tv_samsung_8_series_49_media_input_source-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -8208,7 +8902,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media_input_source', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.inputSource', + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main_mediaInputSource_inputSource_inputSource', 'unit_of_measurement': None, }) # --- @@ -8271,7 +8965,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 
'media_playback_status', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.playbackStatus', + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main_mediaPlayback_playbackStatus_playbackStatus', 'unit_of_measurement': None, }) # --- @@ -8327,7 +9021,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'tv_channel', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.tvChannel', + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main_tvChannel_tvChannel_tvChannel', 'unit_of_measurement': None, }) # --- @@ -8374,7 +9068,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'tv_channel_name', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.tvChannelName', + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main_tvChannel_tvChannelName_tvChannelName', 'unit_of_measurement': None, }) # --- @@ -8421,7 +9115,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'audio_volume', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.volume', + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main_audioVolume_volume_volume', 'unit_of_measurement': '%', }) # --- @@ -8469,7 +9163,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6.battery', + 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -8520,7 +9214,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6.temperature', + 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -8570,7 +9264,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a2a6018b-2663-4727-9d1d-8f56953b5116.battery', + 'unique_id': 'a2a6018b-2663-4727-9d1d-8f56953b5116_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -8619,7 +9313,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158.battery', + 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- diff --git a/tests/components/smartthings/snapshots/test_switch.ambr b/tests/components/smartthings/snapshots/test_switch.ambr index 40f242e82f5..8c95d2f20fc 100644 --- a/tests/components/smartthings/snapshots/test_switch.ambr +++ b/tests/components/smartthings/snapshots/test_switch.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -46,6 +46,53 @@ 'state': 'on', }) # --- +# name: test_all_entities[da_ks_cooktop_31001][switch.induction_hob-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.induction_hob', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '808dbd84-f357-47e2-a0cd-3b66fa22d584_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_cooktop_31001][switch.induction_hob-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Induction Hob', + }), + 'context': , + 'entity_id': 'switch.induction_hob', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[da_ks_microwave_0101x][switch.microwave-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -76,7 +123,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -93,6 +140,53 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_ref_normal_000001][switch.refrigerator_ice_maker-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.refrigerator_ice_maker', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Ice maker', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ice_maker', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_icemaker_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ref_normal_000001][switch.refrigerator_ice_maker-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Refrigerator Ice maker', + }), + 'context': , + 'entity_id': 'switch.refrigerator_ice_maker', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[da_rvc_normal_000001][switch.robot_vacuum-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -123,7 +217,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -170,7 +264,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100', + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -217,7 +311,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -234,6 +328,53 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_wm_sc_000001][switch.airdresser-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.airdresser', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][switch.airdresser-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirDresser', + }), + 'context': , + 'entity_id': 'switch.airdresser', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[da_wm_wd_000001][switch.dryer-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -264,7 +405,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -311,7 +452,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wrinkle_prevent', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_custom.dryerWrinklePrevent', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_custom.dryerWrinklePrevent_dryerWrinklePrevent_dryerWrinklePrevent', 'unit_of_measurement': None, }) # --- @@ -358,7 +499,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -405,7 +546,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wrinkle_prevent', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_custom.dryerWrinklePrevent', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_custom.dryerWrinklePrevent_dryerWrinklePrevent_dryerWrinklePrevent', 'unit_of_measurement': None, }) # --- @@ -452,7 +593,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -499,7 +640,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -516,6 +657,53 @@ 'state': 'on', }) # --- +# name: test_all_entities[da_wm_wm_000001_1][switch.washing_machine_bubble_soak-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.washing_machine_bubble_soak', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Bubble Soak', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bubble_soak', + 'unique_id': 
'63803fae-cbed-f356-a063-2cf148ae3ca7_main_samsungce.washerBubbleSoak_status_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][switch.washing_machine_bubble_soak-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washing Machine Bubble Soak', + }), + 'context': , + 'entity_id': 'switch.washing_machine_bubble_soak', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[generic_ef00_v1][switch.thermostat_kuche-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -546,7 +734,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -563,6 +751,53 @@ 'state': 'off', }) # --- +# name: test_all_entities[hw_q80r_soundbar][switch.soundbar-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.soundbar', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'afcf3b91-0000-1111-2222-ddff2a0a6577_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][switch.soundbar-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Soundbar', + }), + 'context': , + 'entity_id': 'switch.soundbar', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_all_entities[sensibo_airconditioner_1][switch.office-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -593,7 +828,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5', + 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -640,7 +875,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '550a1c72-65a0-4d55-b97b-75168e055398', + 'unique_id': '550a1c72-65a0-4d55-b97b-75168e055398_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -687,7 +922,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1', + 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -734,7 +969,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac', + 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -751,6 +986,53 @@ 'state': 'on', }) # --- +# name: test_all_entities[vd_sensor_light_2023][switch.light_sensor_55_the_frame-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.light_sensor_55_the_frame', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5cc1c096-98b9-460c-8f1c-1045509ec605_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[vd_sensor_light_2023][switch.light_sensor_55_the_frame-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Light Sensor - 55" The Frame', + }), + 'context': , + 'entity_id': 'switch.light_sensor_55_the_frame', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[vd_stv_2017_k][switch.tv_samsung_8_series_49-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -781,7 +1063,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1', + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_update.ambr b/tests/components/smartthings/snapshots/test_update.ambr index e74d2d8518c..c27a0b9f5fc 100644 --- a/tests/components/smartthings/snapshots/test_update.ambr +++ b/tests/components/smartthings/snapshots/test_update.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5', + 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5_main', 'unit_of_measurement': None, }) # --- @@ -89,7 +89,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad', + 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad_main', 'unit_of_measurement': None, }) # --- @@ -149,7 +149,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6', + 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6_main', 'unit_of_measurement': None, }) # --- @@ -209,7 +209,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '71afed1c-006d-4e48-b16e-e7f88f9fd638', + 'unique_id': '71afed1c-006d-4e48-b16e-e7f88f9fd638_main', 'unit_of_measurement': None, }) # --- @@ -269,7 +269,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main', 'unit_of_measurement': None, }) # --- @@ -329,7 +329,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '550a1c72-65a0-4d55-b97b-75168e055398', + 'unique_id': '550a1c72-65a0-4d55-b97b-75168e055398_main', 'unit_of_measurement': None, }) # --- @@ -389,7 +389,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158', + 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158_main', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_valve.ambr b/tests/components/smartthings/snapshots/test_valve.ambr index bdb61187e3a..f82155c8499 100644 --- 
a/tests/components/smartthings/snapshots/test_valve.ambr +++ b/tests/components/smartthings/snapshots/test_valve.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3', + 'unique_id': '612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3_main', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/test_binary_sensor.py b/tests/components/smartthings/test_binary_sensor.py index 4d58b5ddd48..517de034613 100644 --- a/tests/components/smartthings/test_binary_sensor.py +++ b/tests/components/smartthings/test_binary_sensor.py @@ -59,16 +59,23 @@ async def test_state_update( @pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.parametrize("device_fixture", ["virtual_valve"]) +@pytest.mark.parametrize( + ("device_fixture", "issue_string", "entity_id"), + [ + ("virtual_valve", "valve", "binary_sensor.volvo_valve"), + ("da_ref_normal_000001", "fridge_door", "binary_sensor.refrigerator_door"), + ], +) async def test_create_issue( hass: HomeAssistant, devices: AsyncMock, mock_config_entry: MockConfigEntry, issue_registry: ir.IssueRegistry, + issue_string: str, + entity_id: str, ) -> None: """Test we create an issue when an automation or script is using a deprecated entity.""" - entity_id = "binary_sensor.volvo_valve" - issue_id = f"deprecated_binary_valve_{entity_id}" + issue_id = f"deprecated_binary_{issue_string}_{entity_id}" assert await async_setup_component( hass, diff --git a/tests/components/smartthings/test_config_flow.py b/tests/components/smartthings/test_config_flow.py index 4069c201225..d6e8ef03290 100644 --- a/tests/components/smartthings/test_config_flow.py +++ b/tests/components/smartthings/test_config_flow.py @@ -513,7 +513,7 @@ async def test_migration( } assert mock_old_config_entry.unique_id == "397678e5-9995-4a39-9d9f-ae6ba310236c" assert mock_old_config_entry.version == 3 - assert mock_old_config_entry.minor_version == 1 + assert mock_old_config_entry.minor_version == 2 @pytest.mark.usefixtures("current_request_with_host", "use_cloud") @@ -586,7 +586,7 @@ async def test_migration_wrong_location( == "appid123-2be1-4e40-b257-e4ef59083324_397678e5-9995-4a39-9d9f-ae6ba310236c" ) assert mock_old_config_entry.version == 3 - assert mock_old_config_entry.minor_version == 1 + assert mock_old_config_entry.minor_version == 2 @pytest.mark.usefixtures("current_request_with_host") diff --git a/tests/components/smartthings/test_event.py b/tests/components/smartthings/test_event.py index bdca7674981..34a96e9c6b4 100644 --- a/tests/components/smartthings/test_event.py +++ b/tests/components/smartthings/test_event.py @@ -7,6 +7,7 @@ from pysmartthings import Attribute, Capability import pytest from syrupy import SnapshotAssertion +from homeassistant.components.event import ATTR_EVENT_TYPES from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -59,3 +60,40 @@ async def test_state_update( hass.states.get("event.livingroom_smart_switch_button1").state == "2023-10-21T00:00:00.000+00:00" ) + + +@pytest.mark.parametrize("device_fixture", ["heatit_zpushwall"]) +async def test_supported_button_values_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test supported button values update.""" + await setup_integration(hass, mock_config_entry) + + freezer.move_to("2023-10-21") + + 
assert ( + hass.states.get("event.livingroom_smart_switch_button1").state == STATE_UNKNOWN + ) + assert hass.states.get("event.livingroom_smart_switch_button1").attributes[ + ATTR_EVENT_TYPES + ] == ["pushed", "held", "down_hold"] + + await trigger_update( + hass, + devices, + "5e5b97f3-3094-44e6-abc0-f61283412d6a", + Capability.BUTTON, + Attribute.SUPPORTED_BUTTON_VALUES, + ["pushed", "held", "down_hold", "pushed_2x"], + component="button1", + ) + + assert ( + hass.states.get("event.livingroom_smart_switch_button1").state == STATE_UNKNOWN + ) + assert hass.states.get("event.livingroom_smart_switch_button1").attributes[ + ATTR_EVENT_TYPES + ] == ["pushed", "held", "down_hold", "pushed_2x"] diff --git a/tests/components/smartthings/test_init.py b/tests/components/smartthings/test_init.py index c0d0b8b5840..1d4b124c60d 100644 --- a/tests/components/smartthings/test_init.py +++ b/tests/components/smartthings/test_init.py @@ -8,19 +8,33 @@ from pysmartthings import ( Capability, DeviceResponse, DeviceStatus, + Lifecycle, SmartThingsSinkError, + Subscription, ) -from pysmartthings.models import Lifecycle, Subscription import pytest from syrupy import SnapshotAssertion -from homeassistant.components.climate import HVACMode +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN, HVACMode +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN +from homeassistant.components.fan import DOMAIN as FAN_DOMAIN +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.components.smartthings import EVENT_BUTTON -from homeassistant.components.smartthings.const import CONF_SUBSCRIPTION_ID, DOMAIN +from homeassistant.components.smartthings.const import ( + CONF_INSTALLED_APP_ID, + CONF_LOCATION_ID, + CONF_SUBSCRIPTION_ID, + DOMAIN, + SCOPES, +) +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import Event, HomeAssistant -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, entity_registry as er from . 
import setup_integration, trigger_update @@ -353,7 +367,6 @@ async def test_deleted_device_runtime( hass: HomeAssistant, devices: AsyncMock, mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, ) -> None: """Test devices that are deleted in runtime.""" await setup_integration(hass, mock_config_entry) @@ -366,3 +379,321 @@ async def test_deleted_device_runtime( await hass.async_block_till_done() assert hass.states.get("climate.ac_office_granit") is None + + +@pytest.mark.parametrize( + ( + "device_fixture", + "domain", + "old_unique_id", + "suggested_object_id", + "new_unique_id", + ), + [ + ( + "multipurpose_sensor", + BINARY_SENSOR_DOMAIN, + "7d246592-93db-4d72-a10d-5a51793ece8c.contact", + "deck_door", + "7d246592-93db-4d72-a10d-5a51793ece8c_main_contactSensor_contact_contact", + ), + ( + "multipurpose_sensor", + SENSOR_DOMAIN, + "7d246592-93db-4d72-a10d-5a51793ece8c Y Coordinate", + "deck_door_y_coordinate", + "7d246592-93db-4d72-a10d-5a51793ece8c_main_threeAxis_threeAxis_y_coordinate", + ), + ( + "da_ac_rac_000001", + SENSOR_DOMAIN, + "7d246592-93db-4d72-a10d-ca799957065d.energy_meter", + "ac_office_granit_energy", + "7d246592-93db-4d72-a10d-ca799957065d_main_powerConsumptionReport_powerConsumption_energy_meter", + ), + ( + "da_ac_rac_000001", + CLIMATE_DOMAIN, + "7d246592-93db-4d72-a10d-ca799957065d", + "ac_office_granit", + "7d246592-93db-4d72-a10d-ca799957065d_main", + ), + ( + "c2c_shade", + COVER_DOMAIN, + "571af102-15db-4030-b76b-245a691f74a5", + "curtain_1a", + "571af102-15db-4030-b76b-245a691f74a5_main", + ), + ( + "generic_fan_3_speed", + FAN_DOMAIN, + "6d95a8b7-4ee3-429a-a13a-00ec9354170c", + "bedroom_fan", + "6d95a8b7-4ee3-429a-a13a-00ec9354170c_main", + ), + ( + "hue_rgbw_color_bulb", + LIGHT_DOMAIN, + "cb958955-b015-498c-9e62-fc0c51abd054", + "standing_light", + "cb958955-b015-498c-9e62-fc0c51abd054_main", + ), + ( + "yale_push_button_deadbolt_lock", + LOCK_DOMAIN, + "a9f587c5-5d8b-4273-8907-e7f609af5158", + "basement_door_lock", + "a9f587c5-5d8b-4273-8907-e7f609af5158_main", + ), + ( + "smart_plug", + SWITCH_DOMAIN, + "550a1c72-65a0-4d55-b97b-75168e055398", + "arlo_beta_basestation", + "550a1c72-65a0-4d55-b97b-75168e055398_main_switch_switch_switch", + ), + ], +) +async def test_entity_unique_id_migration( + hass: HomeAssistant, + devices: AsyncMock, + expires_at: int, + entity_registry: er.EntityRegistry, + domain: str, + old_unique_id: str, + suggested_object_id: str, + new_unique_id: str, +) -> None: + """Test entity unique ID migration.""" + mock_config_entry = MockConfigEntry( + domain=DOMAIN, + title="My home", + unique_id="397678e5-9995-4a39-9d9f-ae6ba310236c", + data={ + "auth_implementation": DOMAIN, + "token": { + "access_token": "mock-access-token", + "refresh_token": "mock-refresh-token", + "expires_at": expires_at, + "scope": " ".join(SCOPES), + "access_tier": 0, + "installed_app_id": "5aaaa925-2be1-4e40-b257-e4ef59083324", + }, + CONF_LOCATION_ID: "397678e5-9995-4a39-9d9f-ae6ba310236c", + CONF_INSTALLED_APP_ID: "123", + }, + version=3, + minor_version=1, + ) + mock_config_entry.add_to_hass(hass) + entry = entity_registry.async_get_or_create( + domain, + DOMAIN, + old_unique_id, + config_entry=mock_config_entry, + suggested_object_id=suggested_object_id, + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entry.entity_id) + + assert entry.unique_id == new_unique_id + + +@pytest.mark.parametrize( + ( + "device_fixture", + "domain", + 
"other_unique_id", + "old_unique_id", + "suggested_object_id", + "new_unique_id", + ), + [ + ( + "da_ks_microwave_0101x", + SENSOR_DOMAIN, + "2bad3237-4886-e699-1b90-4a51a3d55c8a.ovenJobState", + "2bad3237-4886-e699-1b90-4a51a3d55c8a.machineState", + "microwave_machine_state", + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_machineState_machineState", + ), + ( + "da_ks_microwave_0101x", + SENSOR_DOMAIN, + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_ovenJobState_ovenJobState", + "2bad3237-4886-e699-1b90-4a51a3d55c8a.machineState", + "microwave_machine_state", + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_machineState_machineState", + ), + ( + "da_ks_microwave_0101x", + SENSOR_DOMAIN, + "2bad3237-4886-e699-1b90-4a51a3d55c8a.ovenJobState", + "2bad3237-4886-e699-1b90-4a51a3d55c8a.completionTime", + "microwave_completion_time", + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_completionTime_completionTime", + ), + ( + "da_ks_microwave_0101x", + SENSOR_DOMAIN, + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_ovenJobState_ovenJobState", + "2bad3237-4886-e699-1b90-4a51a3d55c8a.completionTime", + "microwave_completion_time", + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_completionTime_completionTime", + ), + ( + "da_wm_dw_000001", + SENSOR_DOMAIN, + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.dishwasherJobState", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.machineState", + "dishwasher_machine_state", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_machineState_machineState", + ), + ( + "da_wm_dw_000001", + SENSOR_DOMAIN, + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_dishwasherJobState_dishwasherJobState", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.machineState", + "dishwasher_machine_state", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_machineState_machineState", + ), + ( + "da_wm_dw_000001", + SENSOR_DOMAIN, + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.dishwasherJobState", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.completionTime", + "dishwasher_completion_time", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_completionTime_completionTime", + ), + ( + "da_wm_dw_000001", + SENSOR_DOMAIN, + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_dishwasherJobState_dishwasherJobState", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.completionTime", + "dishwasher_completion_time", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_completionTime_completionTime", + ), + ( + "da_wm_wd_000001", + SENSOR_DOMAIN, + "02f7256e-8353-5bdd-547f-bd5b1647e01b.dryerJobState", + "02f7256e-8353-5bdd-547f-bd5b1647e01b.machineState", + "dryer_machine_state", + "02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_machineState_machineState", + ), + ( + "da_wm_wd_000001", + SENSOR_DOMAIN, + "02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_dryerJobState_dryerJobState", + "02f7256e-8353-5bdd-547f-bd5b1647e01b.machineState", + "dryer_machine_state", + "02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_machineState_machineState", + ), + ( + "da_wm_wd_000001", + SENSOR_DOMAIN, + "02f7256e-8353-5bdd-547f-bd5b1647e01b.dryerJobState", + "02f7256e-8353-5bdd-547f-bd5b1647e01b.completionTime", + "dryer_completion_time", + "02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_completionTime_completionTime", + ), + ( + "da_wm_wd_000001", + SENSOR_DOMAIN, + 
"02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_dryerJobState_dryerJobState", + "02f7256e-8353-5bdd-547f-bd5b1647e01b.completionTime", + "dryer_completion_time", + "02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_completionTime_completionTime", + ), + ( + "da_wm_wm_000001", + SENSOR_DOMAIN, + "f984b91d-f250-9d42-3436-33f09a422a47.washerJobState", + "f984b91d-f250-9d42-3436-33f09a422a47.machineState", + "washer_machine_state", + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_machineState_machineState", + ), + ( + "da_wm_wm_000001", + SENSOR_DOMAIN, + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_washerJobState_washerJobState", + "f984b91d-f250-9d42-3436-33f09a422a47.machineState", + "washer_machine_state", + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_machineState_machineState", + ), + ( + "da_wm_wm_000001", + SENSOR_DOMAIN, + "f984b91d-f250-9d42-3436-33f09a422a47.washerJobState", + "f984b91d-f250-9d42-3436-33f09a422a47.completionTime", + "washer_completion_time", + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_completionTime_completionTime", + ), + ( + "da_wm_wm_000001", + SENSOR_DOMAIN, + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_washerJobState_washerJobState", + "f984b91d-f250-9d42-3436-33f09a422a47.completionTime", + "washer_completion_time", + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_completionTime_completionTime", + ), + ], +) +async def test_entity_unique_id_migration_machine_state( + hass: HomeAssistant, + devices: AsyncMock, + expires_at: int, + entity_registry: er.EntityRegistry, + domain: str, + other_unique_id: str, + old_unique_id: str, + suggested_object_id: str, + new_unique_id: str, +) -> None: + """Test entity unique ID migration.""" + mock_config_entry = MockConfigEntry( + domain=DOMAIN, + title="My home", + unique_id="397678e5-9995-4a39-9d9f-ae6ba310236c", + data={ + "auth_implementation": DOMAIN, + "token": { + "access_token": "mock-access-token", + "refresh_token": "mock-refresh-token", + "expires_at": expires_at, + "scope": " ".join(SCOPES), + "access_tier": 0, + "installed_app_id": "5aaaa925-2be1-4e40-b257-e4ef59083324", + }, + CONF_LOCATION_ID: "397678e5-9995-4a39-9d9f-ae6ba310236c", + CONF_INSTALLED_APP_ID: "123", + }, + version=3, + minor_version=1, + ) + mock_config_entry.add_to_hass(hass) + entity_registry.async_get_or_create( + domain, + DOMAIN, + other_unique_id, + config_entry=mock_config_entry, + suggested_object_id="job_state", + ) + entry = entity_registry.async_get_or_create( + domain, + DOMAIN, + old_unique_id, + config_entry=mock_config_entry, + suggested_object_id=suggested_object_id, + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entry.entity_id) + + assert entry.unique_id == new_unique_id diff --git a/tests/components/smartthings/test_media_player.py b/tests/components/smartthings/test_media_player.py new file mode 100644 index 00000000000..b7cecfe8408 --- /dev/null +++ b/tests/components/smartthings/test_media_player.py @@ -0,0 +1,432 @@ +"""Test for the SmartThings media player platform.""" + +from unittest.mock import AsyncMock + +from pysmartthings import Attribute, Capability, Command, Status +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE, + ATTR_MEDIA_REPEAT, + ATTR_MEDIA_SHUFFLE, + ATTR_MEDIA_VOLUME_LEVEL, + 
ATTR_MEDIA_VOLUME_MUTED, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_SOURCE, + RepeatMode, +) +from homeassistant.components.smartthings.const import MAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_MEDIA_STOP, + SERVICE_REPEAT_SET, + SERVICE_SHUFFLE_SET, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_MUTE, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, + STATE_OFF, + STATE_PLAYING, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration, snapshot_smartthings_entities, trigger_update + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities( + hass, entity_registry, snapshot, Platform.MEDIA_PLAYER + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +@pytest.mark.parametrize( + ("action", "command"), + [ + (SERVICE_TURN_ON, Command.ON), + (SERVICE_TURN_OFF, Command.OFF), + ], +) +async def test_turn_on_off( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + action: str, + command: Command, +) -> None: + """Test media player turn on and off command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + action, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", Capability.SWITCH, command, MAIN + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +@pytest.mark.parametrize( + ("muted", "argument"), + [ + (True, "muted"), + (False, "unmuted"), + ], +) +async def test_mute_unmute( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + muted: bool, + argument: str, +) -> None: + """Test media player mute and unmute command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_MUTE, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_MEDIA_VOLUME_MUTED: muted}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.AUDIO_MUTE, + Command.SET_MUTE, + MAIN, + argument=argument, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_set_volume_level( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player set volume level command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_SET, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_MEDIA_VOLUME_LEVEL: 0.31}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.AUDIO_VOLUME, + Command.SET_VOLUME, + MAIN, + argument=31, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_volume_up( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media 
player increase volume level command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_UP, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.AUDIO_VOLUME, + Command.VOLUME_UP, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_volume_down( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player decrease volume level command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_DOWN, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.AUDIO_VOLUME, + Command.VOLUME_DOWN, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_play( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player play command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PLAY, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.PLAY, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_pause( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player pause command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PAUSE, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.PAUSE, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_stop( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player stop command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_STOP, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.STOP, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_previous_track( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player previous track command.""" + devices.get_device_status.return_value[MAIN][Capability.MEDIA_PLAYBACK] = { + Attribute.SUPPORTED_PLAYBACK_COMMANDS: Status(["rewind"]) + } + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PREVIOUS_TRACK, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.REWIND, + MAIN, + ) + + 
+@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_next_track( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player next track command.""" + devices.get_device_status.return_value[MAIN][Capability.MEDIA_PLAYBACK] = { + Attribute.SUPPORTED_PLAYBACK_COMMANDS: Status(["fastForward"]) + } + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_NEXT_TRACK, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.FAST_FORWARD, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_select_source( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player select source command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_SOURCE, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_INPUT_SOURCE: "digital"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_INPUT_SOURCE, + Command.SET_INPUT_SOURCE, + MAIN, + "digital", + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +@pytest.mark.parametrize( + ("shuffle", "argument"), + [ + (True, "enabled"), + (False, "disabled"), + ], +) +async def test_media_shuffle_on_off( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + shuffle: bool, + argument: str, +) -> None: + """Test media player media shuffle command.""" + devices.get_device_status.return_value[MAIN][Capability.MEDIA_PLAYBACK_SHUFFLE] = { + Attribute.PLAYBACK_SHUFFLE: Status(True) + } + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SHUFFLE_SET, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_MEDIA_SHUFFLE: shuffle}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK_SHUFFLE, + Command.SET_PLAYBACK_SHUFFLE, + MAIN, + argument=argument, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +@pytest.mark.parametrize( + ("repeat", "argument"), + [ + (RepeatMode.OFF, "off"), + (RepeatMode.ONE, "one"), + (RepeatMode.ALL, "all"), + ], +) +async def test_media_repeat_mode( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + repeat: RepeatMode, + argument: str, +) -> None: + """Test media player repeat mode command.""" + devices.get_device_status.return_value[MAIN][Capability.MEDIA_PLAYBACK_REPEAT] = { + Attribute.REPEAT_MODE: Status("one") + } + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_REPEAT_SET, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_MEDIA_REPEAT: repeat}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK_REPEAT, + Command.SET_PLAYBACK_REPEAT_MODE, + MAIN, + argument=argument, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_state_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> 
None: + """Test state update.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("media_player.soundbar").state == STATE_PLAYING + + await trigger_update( + hass, + devices, + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.SWITCH, + Attribute.SWITCH, + "off", + ) + + assert hass.states.get("media_player.soundbar").state == STATE_OFF diff --git a/tests/components/smartthings/test_sensor.py b/tests/components/smartthings/test_sensor.py index c83950de9e9..fe112b3db6b 100644 --- a/tests/components/smartthings/test_sensor.py +++ b/tests/components/smartthings/test_sensor.py @@ -6,9 +6,14 @@ from pysmartthings import Attribute, Capability import pytest from syrupy import SnapshotAssertion +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity +from homeassistant.components.smartthings.const import DOMAIN from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import async_setup_component from . import setup_integration, snapshot_smartthings_entities, trigger_update @@ -49,3 +54,90 @@ async def test_state_update( ) assert hass.states.get("sensor.ac_office_granit_temperature").state == "20" + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + ("device_fixture", "entity_id", "translation_key"), + [ + ("hw_q80r_soundbar", "sensor.soundbar_volume", "media_player"), + ("hw_q80r_soundbar", "sensor.soundbar_media_playback_status", "media_player"), + ("hw_q80r_soundbar", "sensor.soundbar_media_input_source", "media_player"), + ( + "im_speaker_ai_0001", + "sensor.galaxy_home_mini_media_playback_shuffle", + "media_player", + ), + ( + "im_speaker_ai_0001", + "sensor.galaxy_home_mini_media_playback_repeat", + "media_player", + ), + ], +) +async def test_create_issue( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, + entity_id: str, + translation_key: str, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + issue_id = f"deprecated_{translation_key}_{entity_id}" + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "id": "test", + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "condition": "state", + "entity_id": entity_id, + "state": "on", + }, + ], + } + } + }, + ) + + await setup_integration(hass, mock_config_entry) + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, issue_id) + assert issue is not None + assert issue.translation_key == f"deprecated_{translation_key}" + assert issue.translation_placeholders == { + "entity": entity_id, + "items": "- [test](/config/automation/edit/test)\n- [test](/config/script/edit/test)", + } + + await hass.config_entries.async_unload(mock_config_entry.entry_id) 
+ await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/smartthings/test_switch.py b/tests/components/smartthings/test_switch.py index 28bac49b0b0..2e360ff68e3 100644 --- a/tests/components/smartthings/test_switch.py +++ b/tests/components/smartthings/test_switch.py @@ -6,7 +6,10 @@ from pysmartthings import Attribute, Capability, Command import pytest from syrupy import SnapshotAssertion -from homeassistant.components.smartthings.const import MAIN +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity +from homeassistant.components.smartthings.const import DOMAIN, MAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -17,7 +20,8 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import async_setup_component from . import setup_integration, snapshot_smartthings_entities, trigger_update @@ -120,3 +124,80 @@ async def test_state_update( ) assert hass.states.get("switch.2nd_floor_hallway").state == STATE_OFF + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + ("device_fixture", "entity_id", "translation_key"), + [ + ("da_wm_wm_000001", "switch.washer", "deprecated_switch_appliance"), + ("da_wm_wd_000001", "switch.dryer", "deprecated_switch_appliance"), + ("hw_q80r_soundbar", "switch.soundbar", "deprecated_switch_media_player"), + ], +) +async def test_create_issue( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, + entity_id: str, + translation_key: str, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + issue_id = f"deprecated_switch_{entity_id}" + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "id": "test", + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "condition": "state", + "entity_id": entity_id, + "state": "on", + }, + ], + } + } + }, + ) + + await setup_integration(hass, mock_config_entry) + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, issue_id) + assert issue is not None + assert issue.translation_key == translation_key + assert issue.translation_placeholders == { + "entity": entity_id, + "items": "- [test](/config/automation/edit/test)\n- [test](/config/script/edit/test)", + } + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 diff --git 
a/tests/components/smlight/test_config_flow.py b/tests/components/smlight/test_config_flow.py index c8933029ce6..4ecfe9366e3 100644 --- a/tests/components/smlight/test_config_flow.py +++ b/tests/components/smlight/test_config_flow.py @@ -193,7 +193,7 @@ async def test_zeroconf_flow_auth( } assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 3 + assert len(mock_smlight_client.get_info.mock_calls) == 2 async def test_zeroconf_unsupported_abort( @@ -406,7 +406,7 @@ async def test_user_invalid_auth( } assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 3 + assert len(mock_smlight_client.get_info.mock_calls) == 2 async def test_user_cannot_connect( @@ -443,7 +443,7 @@ async def test_user_cannot_connect( assert result2["title"] == "SLZB-06p7" assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 3 + assert len(mock_smlight_client.get_info.mock_calls) == 2 async def test_auth_cannot_connect( diff --git a/tests/components/smlight/test_sensor.py b/tests/components/smlight/test_sensor.py index f130d7ccf30..bec73bc514a 100644 --- a/tests/components/smlight/test_sensor.py +++ b/tests/components/smlight/test_sensor.py @@ -2,17 +2,18 @@ from unittest.mock import MagicMock -from pysmlight import Sensors +from pysmlight import Info, Sensors import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.components.smlight.const import DOMAIN from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from .conftest import setup_integration -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform pytestmark = [ pytest.mark.usefixtures( @@ -73,3 +74,38 @@ async def test_zigbee_uptime_disconnected( state = hass.states.get("sensor.mock_title_zigbee_uptime") assert state.state == STATE_UNKNOWN + + +async def test_zigbee2_temp_sensor( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test for zb_temp2 if device has second radio.""" + mock_smlight_client.get_sensors.return_value = Sensors(zb_temp2=20.45) + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.mock_title_zigbee_chip_temp_2") + assert state + assert state.state == "20.45" + + +async def test_zigbee_type_sensors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test for zigbee type sensor with second radio.""" + mock_smlight_client.get_info.side_effect = None + mock_smlight_client.get_info.return_value = Info.from_dict( + load_json_object_fixture("info-MR1.json", DOMAIN) + ) + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.mock_title_zigbee_type") + assert state + assert state.state == "coordinator" + + state = hass.states.get("sensor.mock_title_zigbee_type_2") + assert state + assert state.state == "router" diff --git a/tests/components/smlight/test_update.py b/tests/components/smlight/test_update.py index 86d19968910..d120a08d519 100644 --- a/tests/components/smlight/test_update.py +++ b/tests/components/smlight/test_update.py @@ -154,10 +154,9 @@ async def test_update_zigbee2_firmware( mock_smlight_client: MagicMock, ) -> None: """Test update of zigbee2 firmware where available.""" + 
mock_info = Info.from_dict(load_json_object_fixture("info-MR1.json", DOMAIN)) mock_smlight_client.get_info.side_effect = None - mock_smlight_client.get_info.return_value = Info.from_dict( - load_json_object_fixture("info-MR1.json", DOMAIN) - ) + mock_smlight_client.get_info.return_value = mock_info await setup_integration(hass, mock_config_entry) entity_id = "update.mock_title_zigbee_firmware_2" state = hass.states.get(entity_id) @@ -177,17 +176,17 @@ async def test_update_zigbee2_firmware( event_function = get_mock_event_function(mock_smlight_client, SmEvents.FW_UPD_done) event_function(MOCK_FIRMWARE_DONE) - with patch( - "homeassistant.components.smlight.update.get_radio", return_value=MOCK_RADIO - ): - freezer.tick(timedelta(seconds=5)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - assert state.attributes[ATTR_INSTALLED_VERSION] == "20240716" - assert state.attributes[ATTR_LATEST_VERSION] == "20240716" + mock_info.radios[1] = MOCK_RADIO + + freezer.tick(timedelta(seconds=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_OFF + assert state.attributes[ATTR_INSTALLED_VERSION] == "20240716" + assert state.attributes[ATTR_LATEST_VERSION] == "20240716" async def test_update_legacy_firmware_v2( diff --git a/tests/components/sunweg/__init__.py b/tests/components/sunweg/__init__.py index 1453483a3fd..d9dac10eeb6 100644 --- a/tests/components/sunweg/__init__.py +++ b/tests/components/sunweg/__init__.py @@ -1 +1 @@ -"""Tests for the sunweg component.""" +"""Tests for the Sun WEG integration.""" diff --git a/tests/components/sunweg/common.py b/tests/components/sunweg/common.py deleted file mode 100644 index 096113f6609..00000000000 --- a/tests/components/sunweg/common.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Common functions needed to setup tests for Sun WEG.""" - -from homeassistant.components.sunweg.const import CONF_PLANT_ID, DOMAIN -from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME - -from tests.common import MockConfigEntry - -SUNWEG_USER_INPUT = { - CONF_USERNAME: "username", - CONF_PASSWORD: "password", -} - -SUNWEG_MOCK_ENTRY = MockConfigEntry( - domain=DOMAIN, - unique_id=0, - data={ - CONF_USERNAME: "user@email.com", - CONF_PASSWORD: "password", - CONF_PLANT_ID: 0, - CONF_NAME: "Name", - }, -) diff --git a/tests/components/sunweg/conftest.py b/tests/components/sunweg/conftest.py deleted file mode 100644 index db94b9cc5c8..00000000000 --- a/tests/components/sunweg/conftest.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Conftest for SunWEG tests.""" - -from datetime import datetime - -import pytest -from sunweg.device import MPPT, Inverter, Phase, String -from sunweg.plant import Plant - - -@pytest.fixture -def string_fixture() -> String: - """Define String fixture.""" - return String("STR1", 450.3, 23.4, 0) - - -@pytest.fixture -def mppt_fixture(string_fixture) -> MPPT: - """Define MPPT fixture.""" - mppt = MPPT("mppt") - mppt.strings.append(string_fixture) - return mppt - - -@pytest.fixture -def phase_fixture() -> Phase: - """Define Phase fixture.""" - return Phase("PhaseA", 120.0, 3.2, 0, 0) - - -@pytest.fixture -def inverter_fixture(phase_fixture, mppt_fixture) -> Inverter: - """Define inverter fixture.""" - inverter = Inverter( - 21255, - "INVERSOR01", - "J63T233018RE074", - 23.2, - 0.0, - 0.0, - "MWh", - 0, - "kWh", - 0.0, - 1, - 0, - "kW", - ) - inverter.phases.append(phase_fixture) - 
inverter.mppts.append(mppt_fixture) - return inverter - - -@pytest.fixture -def plant_fixture(inverter_fixture) -> Plant: - """Define Plant fixture.""" - plant = Plant( - 123456, - "Plant #123", - 29.5, - 0.5, - 0, - 12.786912, - 24.0, - "kWh", - 332.2, - 0.012296, - datetime(2023, 2, 16, 14, 22, 37), - ) - plant.inverters.append(inverter_fixture) - return plant - - -@pytest.fixture -def plant_fixture_alternative(inverter_fixture) -> Plant: - """Define Plant fixture.""" - plant = Plant( - 123456, - "Plant #123", - 29.5, - 0.5, - 0, - 12.786912, - 24.0, - "kWh", - 332.2, - 0.012296, - None, - ) - plant.inverters.append(inverter_fixture) - return plant diff --git a/tests/components/sunweg/test_config_flow.py b/tests/components/sunweg/test_config_flow.py deleted file mode 100644 index 8103003d7fb..00000000000 --- a/tests/components/sunweg/test_config_flow.py +++ /dev/null @@ -1,223 +0,0 @@ -"""Tests for the Sun WEG server config flow.""" - -from unittest.mock import patch - -from sunweg.api import APIHelper, SunWegApiError - -from homeassistant import config_entries -from homeassistant.components.sunweg.const import CONF_PLANT_ID, DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from .common import SUNWEG_MOCK_ENTRY, SUNWEG_USER_INPUT - -from tests.common import MockConfigEntry - - -async def test_show_authenticate_form(hass: HomeAssistant) -> None: - """Test that the setup form is served.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - -async def test_incorrect_login(hass: HomeAssistant) -> None: - """Test that it shows the appropriate error when an incorrect username/password/server is entered.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch.object(APIHelper, "authenticate", return_value=False): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], SUNWEG_USER_INPUT - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "invalid_auth"} - - -async def test_server_unavailable(hass: HomeAssistant) -> None: - """Test when the SunWEG server don't respond.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch.object( - APIHelper, "authenticate", side_effect=SunWegApiError("Internal Server Error") - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], SUNWEG_USER_INPUT - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "timeout_connect"} - - -async def test_reauth(hass: HomeAssistant, plant_fixture, inverter_fixture) -> None: - """Test reauth flow.""" - mock_entry = SUNWEG_MOCK_ENTRY - mock_entry.add_to_hass(hass) - - entries = hass.config_entries.async_entries() - assert len(entries) == 1 - assert entries[0].data[CONF_USERNAME] == SUNWEG_MOCK_ENTRY.data[CONF_USERNAME] - assert entries[0].data[CONF_PASSWORD] == SUNWEG_MOCK_ENTRY.data[CONF_PASSWORD] - - result = await mock_entry.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - with patch.object(APIHelper, "authenticate", 
return_value=False): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=SUNWEG_USER_INPUT, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {"base": "invalid_auth"} - - with patch.object( - APIHelper, "authenticate", side_effect=SunWegApiError("Internal Server Error") - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=SUNWEG_USER_INPUT, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {"base": "timeout_connect"} - - with ( - patch.object(APIHelper, "authenticate", return_value=True), - patch.object(APIHelper, "listPlants", return_value=[plant_fixture]), - patch.object(APIHelper, "plant", return_value=plant_fixture), - patch.object(APIHelper, "inverter", return_value=inverter_fixture), - patch.object(APIHelper, "complete_inverter"), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=SUNWEG_USER_INPUT, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - - entries = hass.config_entries.async_entries() - - assert len(entries) == 1 - assert entries[0].data[CONF_USERNAME] == SUNWEG_USER_INPUT[CONF_USERNAME] - assert entries[0].data[CONF_PASSWORD] == SUNWEG_USER_INPUT[CONF_PASSWORD] - - -async def test_no_plants_on_account(hass: HomeAssistant) -> None: - """Test registering an integration with wrong auth then with no plants available.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch.object(APIHelper, "authenticate", return_value=False): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], SUNWEG_USER_INPUT - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "invalid_auth"} - - with ( - patch.object(APIHelper, "authenticate", return_value=True), - patch.object(APIHelper, "listPlants", return_value=[]), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], SUNWEG_USER_INPUT - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "no_plants" - - -async def test_multiple_plant_ids(hass: HomeAssistant, plant_fixture) -> None: - """Test registering an integration and finishing flow with an selected plant_id.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with ( - patch.object(APIHelper, "authenticate", return_value=True), - patch.object( - APIHelper, "listPlants", return_value=[plant_fixture, plant_fixture] - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], SUNWEG_USER_INPUT - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "plant" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_PLANT_ID: 123456} - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_USERNAME] == SUNWEG_USER_INPUT[CONF_USERNAME] - assert result["data"][CONF_PASSWORD] == SUNWEG_USER_INPUT[CONF_PASSWORD] - assert result["data"][CONF_PLANT_ID] == 123456 - - -async def test_one_plant_on_account(hass: HomeAssistant, plant_fixture) -> None: - """Test registering an 
integration and finishing flow with current plant_id.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with ( - patch.object(APIHelper, "authenticate", return_value=True), - patch.object( - APIHelper, - "listPlants", - return_value=[plant_fixture], - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], SUNWEG_USER_INPUT - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_USERNAME] == SUNWEG_USER_INPUT[CONF_USERNAME] - assert result["data"][CONF_PASSWORD] == SUNWEG_USER_INPUT[CONF_PASSWORD] - assert result["data"][CONF_PLANT_ID] == 123456 - - -async def test_existing_plant_configured(hass: HomeAssistant, plant_fixture) -> None: - """Test entering an existing plant_id.""" - entry = MockConfigEntry(domain=DOMAIN, unique_id=123456) - entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with ( - patch.object(APIHelper, "authenticate", return_value=True), - patch.object( - APIHelper, - "listPlants", - return_value=[plant_fixture], - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], SUNWEG_USER_INPUT - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/sunweg/test_init.py b/tests/components/sunweg/test_init.py index 6cbe38a128b..964b48aebcb 100644 --- a/tests/components/sunweg/test_init.py +++ b/tests/components/sunweg/test_init.py @@ -1,209 +1,79 @@ -"""Tests for the Sun WEG init.""" +"""Tests for the Sun WEG integration.""" -import json -from unittest.mock import MagicMock, patch - -from sunweg.api import APIHelper, SunWegApiError - -from homeassistant.components.sunweg import SunWEGData -from homeassistant.components.sunweg.const import DOMAIN, DeviceType -from homeassistant.components.sunweg.sensor.sensor_entity_description import ( - SunWEGSensorEntityDescription, +from homeassistant.components.sunweg import DOMAIN +from homeassistant.config_entries import ( + SOURCE_IGNORE, + ConfigEntryDisabler, + ConfigEntryState, ) -from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component +from homeassistant.helpers import issue_registry as ir -from .common import SUNWEG_MOCK_ENTRY +from tests.common import MockConfigEntry -async def test_methods(hass: HomeAssistant, plant_fixture, inverter_fixture) -> None: - """Test methods.""" - mock_entry = SUNWEG_MOCK_ENTRY - mock_entry.add_to_hass(hass) - - with ( - patch.object(APIHelper, "authenticate", return_value=True), - patch.object(APIHelper, "listPlants", return_value=[plant_fixture]), - patch.object(APIHelper, "plant", return_value=plant_fixture), - patch.object(APIHelper, "inverter", return_value=inverter_fixture), - patch.object(APIHelper, "complete_inverter"), - ): - assert await async_setup_component(hass, DOMAIN, mock_entry.data) - await hass.async_block_till_done() - assert await hass.config_entries.async_unload(mock_entry.entry_id) - - -async def test_setup_wrongpass(hass: HomeAssistant) -> None: - """Test setup with wrong pass.""" - mock_entry = SUNWEG_MOCK_ENTRY - mock_entry.add_to_hass(hass) - with patch.object(APIHelper, "authenticate", return_value=False): - assert await async_setup_component(hass, DOMAIN, mock_entry.data) - await hass.async_block_till_done() - - -async def 
test_setup_error_500(hass: HomeAssistant) -> None: - """Test setup with wrong pass.""" - mock_entry = SUNWEG_MOCK_ENTRY - mock_entry.add_to_hass(hass) - with patch.object( - APIHelper, "authenticate", side_effect=SunWegApiError("Error 500") - ): - assert await async_setup_component(hass, DOMAIN, mock_entry.data) - await hass.async_block_till_done() - - -async def test_sunwegdata_update_exception() -> None: - """Test SunWEGData exception on update.""" - api = MagicMock() - api.plant = MagicMock(side_effect=json.decoder.JSONDecodeError("Message", "Doc", 1)) - data = SunWEGData(api, 0) - data.update() - assert data.data is None - - -async def test_sunwegdata_update_success(plant_fixture) -> None: - """Test SunWEGData success on update.""" - api = MagicMock() - api.plant = MagicMock(return_value=plant_fixture) - api.complete_inverter = MagicMock() - data = SunWEGData(api, 0) - data.update() - assert data.data.id == plant_fixture.id - assert data.data.name == plant_fixture.name - assert data.data.kwh_per_kwp == plant_fixture.kwh_per_kwp - assert data.data.last_update == plant_fixture.last_update - assert data.data.performance_rate == plant_fixture.performance_rate - assert data.data.saving == plant_fixture.saving - assert len(data.data.inverters) == 1 - - -async def test_sunwegdata_update_success_alternative(plant_fixture_alternative) -> None: - """Test SunWEGData success on update.""" - api = MagicMock() - api.plant = MagicMock(return_value=plant_fixture_alternative) - api.complete_inverter = MagicMock() - data = SunWEGData(api, 0) - data.update() - assert data.data.id == plant_fixture_alternative.id - assert data.data.name == plant_fixture_alternative.name - assert data.data.kwh_per_kwp == plant_fixture_alternative.kwh_per_kwp - assert data.data.last_update == plant_fixture_alternative.last_update - assert data.data.performance_rate == plant_fixture_alternative.performance_rate - assert data.data.saving == plant_fixture_alternative.saving - assert len(data.data.inverters) == 1 - - -async def test_sunwegdata_get_api_value_none(plant_fixture) -> None: - """Test SunWEGData none return on get_api_value.""" - api = MagicMock() - data = SunWEGData(api, 123456) - data.data = plant_fixture - assert data.get_api_value("variable", DeviceType.INVERTER, 0, "deep_name") is None - assert data.get_api_value("variable", DeviceType.STRING, 21255, "deep_name") is None - - -async def test_sunwegdata_get_data_drop_threshold() -> None: - """Test SunWEGData get_data with drop threshold.""" - api = MagicMock() - data = SunWEGData(api, 123456) - data.get_api_value = MagicMock() - entity_description = SunWEGSensorEntityDescription( - api_variable_key="variable", key="key", previous_value_drop_threshold=0.1 +async def test_sunweg_repair_issue( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test the Sun WEG configuration entry loading/unloading handles the repair.""" + config_entry_1 = MockConfigEntry( + title="Example 1", + domain=DOMAIN, ) - data.get_api_value.return_value = 3.0 - assert data.get_data( - api_variable_key=entity_description.api_variable_key, - api_variable_unit=entity_description.api_variable_unit, - deep_name=None, - device_type=DeviceType.TOTAL, - inverter_id=0, - name=entity_description.name, - native_unit_of_measurement=entity_description.native_unit_of_measurement, - never_resets=entity_description.never_resets, - previous_value_drop_threshold=entity_description.previous_value_drop_threshold, - ) == (3.0, None) - data.get_api_value.return_value = 2.91 - assert 
data.get_data( - api_variable_key=entity_description.api_variable_key, - api_variable_unit=entity_description.api_variable_unit, - deep_name=None, - device_type=DeviceType.TOTAL, - inverter_id=0, - name=entity_description.name, - native_unit_of_measurement=entity_description.native_unit_of_measurement, - never_resets=entity_description.never_resets, - previous_value_drop_threshold=entity_description.previous_value_drop_threshold, - ) == (3.0, None) - data.get_api_value.return_value = 2.8 - assert data.get_data( - api_variable_key=entity_description.api_variable_key, - api_variable_unit=entity_description.api_variable_unit, - deep_name=None, - device_type=DeviceType.TOTAL, - inverter_id=0, - name=entity_description.name, - native_unit_of_measurement=entity_description.native_unit_of_measurement, - never_resets=entity_description.never_resets, - previous_value_drop_threshold=entity_description.previous_value_drop_threshold, - ) == (2.8, None) + config_entry_1.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry_1.entry_id) + await hass.async_block_till_done() + assert config_entry_1.state is ConfigEntryState.LOADED - -async def test_sunwegdata_get_data_never_reset() -> None: - """Test SunWEGData get_data with never reset.""" - api = MagicMock() - data = SunWEGData(api, 123456) - data.get_api_value = MagicMock() - entity_description = SunWEGSensorEntityDescription( - api_variable_key="variable", key="key", never_resets=True + # Add a second one + config_entry_2 = MockConfigEntry( + title="Example 2", + domain=DOMAIN, ) - data.get_api_value.return_value = 3.0 - assert data.get_data( - api_variable_key=entity_description.api_variable_key, - api_variable_unit=entity_description.api_variable_unit, - deep_name=None, - device_type=DeviceType.TOTAL, - inverter_id=0, - name=entity_description.name, - native_unit_of_measurement=entity_description.native_unit_of_measurement, - never_resets=entity_description.never_resets, - previous_value_drop_threshold=entity_description.previous_value_drop_threshold, - ) == (3.0, None) - data.get_api_value.return_value = 0 - assert data.get_data( - api_variable_key=entity_description.api_variable_key, - api_variable_unit=entity_description.api_variable_unit, - deep_name=None, - device_type=DeviceType.TOTAL, - inverter_id=0, - name=entity_description.name, - native_unit_of_measurement=entity_description.native_unit_of_measurement, - never_resets=entity_description.never_resets, - previous_value_drop_threshold=entity_description.previous_value_drop_threshold, - ) == (3.0, None) - data.get_api_value.return_value = 2.8 - assert data.get_data( - api_variable_key=entity_description.api_variable_key, - api_variable_unit=entity_description.api_variable_unit, - deep_name=None, - device_type=DeviceType.TOTAL, - inverter_id=0, - name=entity_description.name, - native_unit_of_measurement=entity_description.native_unit_of_measurement, - never_resets=entity_description.never_resets, - previous_value_drop_threshold=entity_description.previous_value_drop_threshold, - ) == (2.8, None) + config_entry_2.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry_2.entry_id) + await hass.async_block_till_done() + assert config_entry_2.state is ConfigEntryState.LOADED + assert issue_registry.async_get_issue(DOMAIN, DOMAIN) -async def test_reauth_started(hass: HomeAssistant) -> None: - """Test reauth flow started.""" - mock_entry = SUNWEG_MOCK_ENTRY - mock_entry.add_to_hass(hass) - with patch.object(APIHelper, "authenticate", return_value=False): - await 
async_setup_component(hass, DOMAIN, {}) - await hass.async_block_till_done() - assert mock_entry.state is ConfigEntryState.SETUP_ERROR - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - assert flows[0]["step_id"] == "reauth_confirm" + # Add an ignored entry + config_entry_3 = MockConfigEntry( + source=SOURCE_IGNORE, + domain=DOMAIN, + ) + config_entry_3.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry_3.entry_id) + await hass.async_block_till_done() + + assert config_entry_3.state is ConfigEntryState.NOT_LOADED + + # Add a disabled entry + config_entry_4 = MockConfigEntry( + disabled_by=ConfigEntryDisabler.USER, + domain=DOMAIN, + ) + config_entry_4.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry_4.entry_id) + await hass.async_block_till_done() + + assert config_entry_4.state is ConfigEntryState.NOT_LOADED + + # Remove the first one + await hass.config_entries.async_remove(config_entry_1.entry_id) + await hass.async_block_till_done() + + assert config_entry_1.state is ConfigEntryState.NOT_LOADED + assert config_entry_2.state is ConfigEntryState.LOADED + assert issue_registry.async_get_issue(DOMAIN, DOMAIN) + + # Remove the second one + await hass.config_entries.async_remove(config_entry_2.entry_id) + await hass.async_block_till_done() + + assert config_entry_1.state is ConfigEntryState.NOT_LOADED + assert config_entry_2.state is ConfigEntryState.NOT_LOADED + assert issue_registry.async_get_issue(DOMAIN, DOMAIN) is None + + # Check the ignored and disabled entries are removed + assert not hass.config_entries.async_entries(DOMAIN) diff --git a/tests/components/switchbot/__init__.py b/tests/components/switchbot/__init__.py index 4d6794b962f..d123c93a873 100644 --- a/tests/components/switchbot/__init__.py +++ b/tests/components/switchbot/__init__.py @@ -294,3 +294,28 @@ REMOTE_SERVICE_INFO = BluetoothServiceInfoBleak( connectable=False, tx_power=-127, ) + + +WOHUB2_SERVICE_INFO = BluetoothServiceInfoBleak( + name="WoHub2", + manufacturer_data={ + 2409: b"\xe7\x06\x1dx\x99y\x00\xffg\xe2\xbf]\x84\x04\x9a,\x00", + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"v\x00"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + address="AA:BB:CC:DD:EE:FF", + rssi=-60, + source="local", + advertisement=generate_advertisement_data( + local_name="WoHub2", + manufacturer_data={ + 2409: b"\xe7\x06\x1dx\x99y\x00\xffg\xe2\xbf]\x84\x04\x9a,\x00", + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"v\x00"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + ), + device=generate_ble_device("AA:BB:CC:DD:EE:FF", "WoHub2"), + time=0, + connectable=True, + tx_power=-127, +) diff --git a/tests/components/switchbot/test_sensor.py b/tests/components/switchbot/test_sensor.py index 6a7111a054e..5fd270b3393 100644 --- a/tests/components/switchbot/test_sensor.py +++ b/tests/components/switchbot/test_sensor.py @@ -25,6 +25,7 @@ from . 
import ( LEAK_SERVICE_INFO, REMOTE_SERVICE_INFO, WOHAND_SERVICE_INFO, + WOHUB2_SERVICE_INFO, WOMETERTHPC_SERVICE_INFO, WORELAY_SWITCH_1PM_SERVICE_INFO, ) @@ -234,3 +235,61 @@ async def test_remote(hass: HomeAssistant) -> None: assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_hub2_sensor(hass: HomeAssistant) -> None: + """Test setting up creates the sensor for WoHub2.""" + await async_setup_component(hass, DOMAIN, {}) + inject_bluetooth_service_info(hass, WOHUB2_SERVICE_INFO) + + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_ADDRESS: "AA:BB:CC:DD:EE:FF", + CONF_NAME: "test-name", + CONF_SENSOR_TYPE: "hub2", + }, + unique_id="aabbccddeeff", + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all("sensor")) == 5 + + temperature_sensor = hass.states.get("sensor.test_name_temperature") + temperature_sensor_attrs = temperature_sensor.attributes + assert temperature_sensor.state == "26.4" + assert temperature_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Temperature" + assert temperature_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "°C" + assert temperature_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + humidity_sensor = hass.states.get("sensor.test_name_humidity") + humidity_sensor_attrs = humidity_sensor.attributes + assert humidity_sensor.state == "44" + assert humidity_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Humidity" + assert humidity_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" + assert humidity_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + light_level_sensor = hass.states.get("sensor.test_name_light_level") + light_level_sensor_attrs = light_level_sensor.attributes + assert light_level_sensor.state == "4" + assert light_level_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Light level" + assert light_level_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "Level" + + light_level_sensor = hass.states.get("sensor.test_name_illuminance") + light_level_sensor_attrs = light_level_sensor.attributes + assert light_level_sensor.state == "30" + assert light_level_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Illuminance" + assert light_level_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "lx" + + rssi_sensor = hass.states.get("sensor.test_name_bluetooth_signal") + rssi_sensor_attrs = rssi_sensor.attributes + assert rssi_sensor.state == "-60" + assert rssi_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Bluetooth signal" + assert rssi_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "dBm" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/synology_dsm/test_backup.py b/tests/components/synology_dsm/test_backup.py index 8475a253231..db0062b45bf 100644 --- a/tests/components/synology_dsm/test_backup.py +++ b/tests/components/synology_dsm/test_backup.py @@ -4,9 +4,13 @@ from io import StringIO from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch +from freezegun.api import FrozenDateTimeFactory import pytest from synology_dsm.api.file_station.models import SynoFileFile, SynoFileSharedFolder -from synology_dsm.exceptions import SynologyDSMAPIErrorException +from synology_dsm.exceptions import ( + SynologyDSMAPIErrorException, + SynologyDSMRequestException, +) from homeassistant.components.backup import ( DOMAIN as BACKUP_DOMAIN, @@ -279,6 
+283,50 @@ async def test_agents_on_unload( } +async def test_agents_on_changed_update_success( + hass: HomeAssistant, + setup_dsm_with_filestation: MagicMock, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, +) -> None: + """Test backup agent on changed update success of coordinator.""" + client = await hass_ws_client(hass) + + # config entry is loaded + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + assert response["success"] + assert len(response["result"]["agents"]) == 2 + + # coordinator update was successful + freezer.tick(910) # 15 min interval + 10s + await hass.async_block_till_done(wait_background_tasks=True) + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + assert response["success"] + assert len(response["result"]["agents"]) == 2 + + # coordinator update was unsuccessful + setup_dsm_with_filestation.update.side_effect = SynologyDSMRequestException( + OSError() + ) + freezer.tick(910) + await hass.async_block_till_done(wait_background_tasks=True) + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + assert response["success"] + assert len(response["result"]["agents"]) == 1 + + # coordinator update was successful again + setup_dsm_with_filestation.update.side_effect = None + freezer.tick(910) + await hass.async_block_till_done(wait_background_tasks=True) + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + assert response["success"] + assert len(response["result"]["agents"]) == 2 + + async def test_agents_list_backups( hass: HomeAssistant, setup_dsm_with_filestation: MagicMock, diff --git a/tests/components/system_bridge/test_init.py b/tests/components/system_bridge/test_init.py index 7632a0c8157..25ccbdeb46c 100644 --- a/tests/components/system_bridge/test_init.py +++ b/tests/components/system_bridge/test_init.py @@ -81,3 +81,53 @@ async def test_migration_minor_future_version(hass: HomeAssistant) -> None: assert config_entry.minor_version == config_entry_minor_version assert config_entry.data == config_entry_data assert config_entry.state is ConfigEntryState.LOADED + + +async def test_setup_timeout(hass: HomeAssistant) -> None: + """Test setup with timeout error.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=FIXTURE_UUID, + data=FIXTURE_USER_INPUT, + version=SystemBridgeConfigFlow.VERSION, + minor_version=SystemBridgeConfigFlow.MINOR_VERSION, + ) + + with patch( + "systembridgeconnector.version.Version.check_supported", + side_effect=TimeoutError, + ): + config_entry.add_to_hass(hass) + result = await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert result is False + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_coordinator_get_data_timeout(hass: HomeAssistant) -> None: + """Test coordinator handling timeout during get_data.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=FIXTURE_UUID, + data=FIXTURE_USER_INPUT, + version=SystemBridgeConfigFlow.VERSION, + minor_version=SystemBridgeConfigFlow.MINOR_VERSION, + ) + + with ( + patch( + "systembridgeconnector.version.Version.check_supported", + return_value=True, + ), + patch( + "homeassistant.components.system_bridge.coordinator.SystemBridgeDataUpdateCoordinator.async_get_data", + side_effect=TimeoutError, + ), + ): + config_entry.add_to_hass(hass) + result = await
hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert result is False + assert config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/tado/__init__.py b/tests/components/tado/__init__.py index 11d199f01a1..e6b6257e6ea 100644 --- a/tests/components/tado/__init__.py +++ b/tests/components/tado/__init__.py @@ -1 +1 @@ -"""Tests for the tado integration.""" +"""Tests for the Tado integration.""" diff --git a/tests/components/tado/conftest.py b/tests/components/tado/conftest.py new file mode 100644 index 00000000000..1aa62b218a2 --- /dev/null +++ b/tests/components/tado/conftest.py @@ -0,0 +1,50 @@ +"""Fixtures for Tado tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from PyTado.http import DeviceActivationStatus +import pytest + +from homeassistant.components.tado import CONF_REFRESH_TOKEN, DOMAIN + +from tests.common import MockConfigEntry, load_json_object_fixture + + +@pytest.fixture +def mock_tado_api() -> Generator[MagicMock]: + """Mock the Tado API.""" + with ( + patch("homeassistant.components.tado.Tado") as mock_tado, + patch("homeassistant.components.tado.config_flow.Tado", new=mock_tado), + ): + client = mock_tado.return_value + client.device_verification_url.return_value = ( + "https://login.tado.com/oauth2/device?user_code=TEST" + ) + client.device_activation_status.return_value = DeviceActivationStatus.COMPLETED + client.get_me.return_value = load_json_object_fixture("me.json", DOMAIN) + client.get_refresh_token.return_value = "refresh" + yield client + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Mock the setup entry.""" + with patch( + "homeassistant.components.tado.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_REFRESH_TOKEN: "refresh", + }, + unique_id="1", + version=2, + ) diff --git a/tests/components/tado/fixtures/device_authorize.json b/tests/components/tado/fixtures/device_authorize.json new file mode 100644 index 00000000000..aacd171fafd --- /dev/null +++ b/tests/components/tado/fixtures/device_authorize.json @@ -0,0 +1,8 @@ +{ + "device_code": "ABCD", + "expires_in": 300, + "interval": 5, + "user_code": "TEST", + "verification_uri": "https://login.tado.com/oauth2/device", + "verification_uri_complete": "https://login.tado.com/oauth2/device?user_code=TEST" +} diff --git a/tests/components/tado/test_config_flow.py b/tests/components/tado/test_config_flow.py index 19acb0aecbd..2fd8e6a0468 100644 --- a/tests/components/tado/test_config_flow.py +++ b/tests/components/tado/test_config_flow.py @@ -1,20 +1,20 @@ """Test the Tado config flow.""" -from http import HTTPStatus from ipaddress import ip_address -from unittest.mock import MagicMock, patch +import threading +from unittest.mock import AsyncMock, MagicMock, patch -import PyTado +from PyTado.http import DeviceActivationStatus import pytest -import requests -from homeassistant import config_entries -from homeassistant.components.tado.config_flow import NoHomes +from homeassistant.components.tado.config_flow import TadoException from homeassistant.components.tado.const import ( CONF_FALLBACK, + CONF_REFRESH_TOKEN, CONST_OVERLAY_TADO_DEFAULT, DOMAIN, ) +from homeassistant.config_entries import SOURCE_HOMEKIT, SOURCE_USER from homeassistant.const import CONF_PASSWORD, 
CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -26,92 +26,186 @@ from homeassistant.helpers.service_info.zeroconf import ( from tests.common import MockConfigEntry -def _get_mock_tado_api(get_me=None) -> MagicMock: - mock_tado = MagicMock() - if isinstance(get_me, Exception): - type(mock_tado).get_me = MagicMock(side_effect=get_me) - else: - type(mock_tado).get_me = MagicMock(return_value=get_me) - return mock_tado +async def test_full_flow( + hass: HomeAssistant, + mock_tado_api: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full flow of the config flow.""" + + event = threading.Event() + + def mock_tado_api_device_activation() -> None: + # Simulate the device activation process + event.wait(timeout=5) + + mock_tado_api.device_activation = mock_tado_api_device_activation + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "user" + + event.set() + await hass.async_block_till_done() + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "home name" + assert result["data"] == {CONF_REFRESH_TOKEN: "refresh"} + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_full_flow_reauth( + hass: HomeAssistant, + mock_tado_api: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full flow of the config flow when reauthenticating.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="ABC-123-DEF-456", + data={CONF_REFRESH_TOKEN: "totally_refresh_for_reauth"}, + ) + entry.add_to_hass(hass) + + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + # No user input yet + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + event = threading.Event() + + def mock_tado_api_device_activation() -> None: + # Simulate the device activation process + event.wait(timeout=5) + + mock_tado_api.device_activation = mock_tado_api_device_activation + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "user" + + event.set() + await hass.async_block_till_done() + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "home name" + assert result["data"] == {CONF_REFRESH_TOKEN: "refresh"} + + +async def test_auth_timeout( + hass: HomeAssistant, + mock_tado_api: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the auth timeout.""" + mock_tado_api.device_activation_status.return_value = DeviceActivationStatus.PENDING + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS_DONE + assert result["step_id"] == "timeout" + + mock_tado_api.device_activation_status.return_value = ( + DeviceActivationStatus.COMPLETED + ) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"]
is FlowResultType.FORM + assert result["step_id"] == "timeout" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "home name" + assert result["data"] == {CONF_REFRESH_TOKEN: "refresh"} + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_no_homes(hass: HomeAssistant, mock_tado_api: MagicMock) -> None: + """Test the flow aborts when the account has no homes.""" + mock_tado_api.get_me.return_value["homes"] = [] + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS_DONE + assert result["step_id"] == "finish_login" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_homes" + + +async def test_tado_creation(hass: HomeAssistant) -> None: + """Test that an exception when creating the Tado object aborts the flow.""" + + with patch( + "homeassistant.components.tado.config_flow.Tado", + side_effect=TadoException("Test exception"), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" @pytest.mark.parametrize( ("exception", "error"), [ - (KeyError, "invalid_auth"), - (RuntimeError, "cannot_connect"), - (ValueError, "unknown"), + (Exception, "timeout"), + (TadoException, "timeout"), ], ) -async def test_form_exceptions( - hass: HomeAssistant, exception: Exception, error: str +async def test_wait_for_login_exception( + hass: HomeAssistant, + mock_tado_api: MagicMock, + exception: Exception, + error: str, ) -> None: - """Test we handle Form Exceptions.""" + """Test that an exception in wait for login is handled properly.""" + mock_tado_api.device_activation.side_effect = exception + result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) - - with patch( - "homeassistant.components.tado.config_flow.Tado", - side_effect=exception, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error} - - # Test a retry to recover, upon failure - mock_tado_api = _get_mock_tado_api(get_me={"homes": [{"id": 1, "name": "myhome"}]}) - - with ( - patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ), - patch( - "homeassistant.components.tado.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "myhome" - assert result["data"] == { - "username": "test-username", - "password": "test-password", - } - assert len(mock_setup_entry.mock_calls) == 1 + # @joostlek: I think the timeout step is not rightfully named, but heck, it works + assert result["type"] is FlowResultType.SHOW_PROGRESS_DONE + assert result["step_id"] == error -async def test_options_flow(hass: HomeAssistant) -> None: +async def test_options_flow( + hass: HomeAssistant, + mock_tado_api: MagicMock, + mock_setup_entry:
AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: """Test config flow options.""" - entry = MockConfigEntry(domain=DOMAIN, data={"username": "test-username"}) - entry.add_to_hass(hass) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - with patch( - "homeassistant.components.tado.async_setup_entry", - return_value=True, - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init( - entry.entry_id, context={"source": config_entries.SOURCE_USER} - ) + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" @@ -119,125 +213,17 @@ async def test_options_flow(hass: HomeAssistant) -> None: result["flow_id"], {CONF_FALLBACK: CONST_OVERLAY_TADO_DEFAULT}, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == {CONF_FALLBACK: CONST_OVERLAY_TADO_DEFAULT} -async def test_create_entry(hass: HomeAssistant) -> None: - """Test we can setup though the user path.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - mock_tado_api = _get_mock_tado_api(get_me={"homes": [{"id": 1, "name": "myhome"}]}) - - with ( - patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ), - patch( - "homeassistant.components.tado.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "myhome" - assert result["data"] == { - "username": "test-username", - "password": "test-password", - } - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_form_invalid_auth(hass: HomeAssistant) -> None: - """Test we handle invalid auth.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - response_mock = MagicMock() - type(response_mock).status_code = HTTPStatus.UNAUTHORIZED - mock_tado_api = _get_mock_tado_api( - get_me=requests.HTTPError(response=response_mock) - ) - - with patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "invalid_auth"} - - -async def test_form_cannot_connect(hass: HomeAssistant) -> None: - """Test we handle cannot connect error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - response_mock = MagicMock() - type(response_mock).status_code = HTTPStatus.INTERNAL_SERVER_ERROR - mock_tado_api = _get_mock_tado_api( - get_me=requests.HTTPError(response=response_mock) - ) - - with patch( - 
"homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - -async def test_no_homes(hass: HomeAssistant) -> None: - """Test we handle no homes error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - mock_tado_api = _get_mock_tado_api(get_me={"homes": []}) - - with patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "no_homes"} - - -async def test_form_homekit(hass: HomeAssistant) -> None: +async def test_homekit(hass: HomeAssistant, mock_tado_api: MagicMock) -> None: """Test that we abort from homekit if tado is already setup.""" result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_HOMEKIT}, + context={"source": SOURCE_HOMEKIT}, data=ZeroconfServiceInfo( ip_address=ip_address("127.0.0.1"), ip_addresses=[ip_address("127.0.0.1")], @@ -249,13 +235,18 @@ async def test_form_homekit(hass: HomeAssistant) -> None: ), ) assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - flow = next( - flow - for flow in hass.config_entries.flow.async_progress() - if flow["flow_id"] == result["flow_id"] - ) - assert flow["context"]["unique_id"] == "AA:BB:CC:DD:EE:FF" + assert result["step_id"] == "homekit_confirm" + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == "1" + + +async def test_homekit_already_setup( + hass: HomeAssistant, mock_tado_api: MagicMock +) -> None: + """Test that we abort from homekit if tado is already setup.""" entry = MockConfigEntry( domain=DOMAIN, data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"} @@ -264,7 +255,7 @@ async def test_form_homekit(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_HOMEKIT}, + context={"source": SOURCE_HOMEKIT}, data=ZeroconfServiceInfo( ip_address=ip_address("127.0.0.1"), ip_addresses=[ip_address("127.0.0.1")], @@ -276,77 +267,4 @@ async def test_form_homekit(hass: HomeAssistant) -> None: ), ) assert result["type"] is FlowResultType.ABORT - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (PyTado.exceptions.TadoWrongCredentialsException, "invalid_auth"), - (RuntimeError, "cannot_connect"), - (NoHomes, "no_homes"), - (ValueError, "unknown"), - ], -) -async def test_reconfigure_flow( - hass: HomeAssistant, exception: Exception, error: str -) -> None: - """Test re-configuration flow.""" - entry = MockConfigEntry( - domain=DOMAIN, - data={ - "username": "test-username", - "password": "test-password", - "home_id": 1, - }, - unique_id="unique_id", - ) - entry.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - - assert result["type"] is FlowResultType.FORM - - with patch( - "homeassistant.components.tado.config_flow.Tado", - side_effect=exception, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], 
- { - CONF_PASSWORD: "test-password", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error} - - mock_tado_api = _get_mock_tado_api(get_me={"homes": [{"id": 1, "name": "myhome"}]}) - with ( - patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ), - patch( - "homeassistant.components.tado.async_setup_entry", - return_value=True, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_PASSWORD: "test-password", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - entry = hass.config_entries.async_get_entry(entry.entry_id) - assert entry - assert entry.title == "Mock Title" - assert entry.data == { - "username": "test-username", - "password": "test-password", - "home_id": 1, - } + assert result["reason"] == "already_configured" diff --git a/tests/components/tado/test_helper.py b/tests/components/tado/test_helper.py index da959c2124a..7f798e3797c 100644 --- a/tests/components/tado/test_helper.py +++ b/tests/components/tado/test_helper.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock, patch from PyTado.interface import Tado import pytest -from homeassistant.components.tado import TadoDataUpdateCoordinator +from homeassistant.components.tado import CONF_REFRESH_TOKEN, TadoDataUpdateCoordinator from homeassistant.components.tado.const import ( CONST_OVERLAY_MANUAL, CONST_OVERLAY_TADO_DEFAULT, @@ -28,13 +28,13 @@ def entry(request: pytest.FixtureRequest) -> MockConfigEntry: request.param if hasattr(request, "param") else CONST_OVERLAY_TADO_DEFAULT ) return MockConfigEntry( - version=1, - minor_version=1, + version=2, domain=DOMAIN, title="Tado", data={ CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password", + CONF_REFRESH_TOKEN: "test-refresh", }, options={ "fallback": fallback, diff --git a/tests/components/tado/test_init.py b/tests/components/tado/test_init.py new file mode 100644 index 00000000000..2f2ccacf3c0 --- /dev/null +++ b/tests/components/tado/test_init.py @@ -0,0 +1,30 @@ +"""Test the Tado integration.""" + +from homeassistant.components.tado import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_v1_migration(hass: HomeAssistant) -> None: + """Test migration from v1 to v2 config entry.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_USERNAME: "test", + CONF_PASSWORD: "test", + }, + unique_id="1", + version=1, + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.version == 2 + assert CONF_USERNAME not in entry.data + assert CONF_PASSWORD not in entry.data + + assert entry.state is ConfigEntryState.SETUP_ERROR + assert len(hass.config_entries.flow.async_progress()) == 1 diff --git a/tests/components/tado/util.py b/tests/components/tado/util.py index 5bf87dbed33..6fd333dff51 100644 --- a/tests/components/tado/util.py +++ b/tests/components/tado/util.py @@ -2,8 +2,7 @@ import requests_mock -from homeassistant.components.tado import DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.components.tado import CONF_REFRESH_TOKEN, DOMAIN from homeassistant.core import HomeAssistant from 
tests.common import MockConfigEntry, load_fixture @@ -178,9 +177,16 @@ async def async_init_integration( "https://my.tado.com/api/v2/homes/1/zones/1/state", text=load_fixture(zone_1_state_fixture), ) + m.post( + "https://login.tado.com/oauth2/token", + text=load_fixture(token_fixture), + ) entry = MockConfigEntry( domain=DOMAIN, - data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"}, + version=2, + data={ + CONF_REFRESH_TOKEN: "mock-token", + }, options={"fallback": "NEXT_TIME_BLOCK"}, ) entry.add_to_hass(hass) diff --git a/tests/components/template/test_switch.py b/tests/components/template/test_switch.py index f0dbe43b51e..d8877851efe 100644 --- a/tests/components/template/test_switch.py +++ b/tests/components/template/test_switch.py @@ -8,6 +8,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import switch, template from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.components.template.switch import rewrite_legacy_to_modern_conf +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -17,6 +18,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import CoreState, HomeAssistant, ServiceCall, State +from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.template import Template from homeassistant.setup import async_setup_component @@ -29,6 +31,7 @@ from tests.common import ( mock_component, mock_restore_cache, ) +from tests.typing import WebSocketGenerator TEST_OBJECT_ID = "test_template_switch" TEST_ENTITY_ID = f"switch.{TEST_OBJECT_ID}" @@ -279,6 +282,46 @@ async def test_setup_config_entry( assert state == snapshot +@pytest.mark.parametrize("state_key", ["value_template", "state"]) +async def test_flow_preview( + hass: HomeAssistant, + state_key: str, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the config flow preview.""" + client = await hass_ws_client(hass) + + result = await hass.config_entries.flow.async_init( + template.DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": SWITCH_DOMAIN}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == SWITCH_DOMAIN + assert result["errors"] is None + assert result["preview"] == "template" + + await client.send_json_auto_id( + { + "type": "template/start_preview", + "flow_id": result["flow_id"], + "flow_type": "config_flow", + "user_input": {"name": "My template", state_key: "{{ 'on' }}"}, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] is None + + msg = await client.receive_json() + assert msg["event"]["state"] == "on" + + @pytest.mark.parametrize( ("count", "state_template"), [(1, "{{ states.switch.test_state.state }}")] ) diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py index 06d2b54c936..10b01caca96 100644 --- a/tests/components/tesla_fleet/conftest.py +++ b/tests/components/tesla_fleet/conftest.py @@ -1,4 +1,4 @@ -"""Fixtures for Tessie.""" +"""Fixtures for Tesla Fleet.""" from __future__ import annotations @@ -113,7 +113,7 @@ def mock_products() -> Generator[AsyncMock]: def mock_vehicle_state() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific 
vehicle method.""" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.vehicle", + "tesla_fleet_api.tesla.VehicleFleet.vehicle", return_value=VEHICLE_ONLINE, ) as mock_vehicle: yield mock_vehicle @@ -123,7 +123,7 @@ def mock_vehicle_state() -> Generator[AsyncMock]: def mock_vehicle_data() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle_data method.""" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.vehicle_data", + "tesla_fleet_api.tesla.VehicleFleet.vehicle_data", return_value=VEHICLE_DATA, ) as mock_vehicle_data: yield mock_vehicle_data @@ -133,7 +133,7 @@ def mock_vehicle_data() -> Generator[AsyncMock]: def mock_wake_up() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific wake_up method.""" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.wake_up", + "tesla_fleet_api.tesla.VehicleFleet.wake_up", return_value=VEHICLE_ONLINE, ) as mock_wake_up: yield mock_wake_up @@ -143,7 +143,7 @@ def mock_wake_up() -> Generator[AsyncMock]: def mock_live_status() -> Generator[AsyncMock]: """Mock Tesla Fleet API Energy Specific live_status method.""" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.live_status", + "tesla_fleet_api.tesla.EnergySite.live_status", side_effect=lambda: deepcopy(LIVE_STATUS), ) as mock_live_status: yield mock_live_status @@ -153,7 +153,7 @@ def mock_live_status() -> Generator[AsyncMock]: def mock_site_info() -> Generator[AsyncMock]: """Mock Tesla Fleet API Energy Specific site_info method.""" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.site_info", + "tesla_fleet_api.tesla.EnergySite.site_info", side_effect=lambda: deepcopy(SITE_INFO), ) as mock_live_status: yield mock_live_status @@ -182,7 +182,7 @@ def mock_request(): def mock_energy_history(): """Mock Teslemetry Energy Specific site_info method.""" with patch( - "homeassistant.components.teslemetry.EnergySpecific.energy_history", + "tesla_fleet_api.tesla.EnergySite.energy_history", return_value=ENERGY_HISTORY, ) as mock_live_status: yield mock_live_status @@ -192,7 +192,7 @@ def mock_energy_history(): def mock_signed_command() -> Generator[AsyncMock]: """Mock Tesla Fleet Api signed_command method.""" with patch( - "homeassistant.components.tesla_fleet.VehicleSigned.signed_command", + "tesla_fleet_api.tesla.VehicleSigned.signed_command", return_value=COMMAND_OK, ) as mock_signed_command: yield mock_signed_command diff --git a/tests/components/tesla_fleet/test_button.py b/tests/components/tesla_fleet/test_button.py index ef1cfd90357..d43f7448379 100644 --- a/tests/components/tesla_fleet/test_button.py +++ b/tests/components/tesla_fleet/test_button.py @@ -56,7 +56,7 @@ async def test_press( await setup_platform(hass, normal_config_entry, [Platform.BUTTON]) with patch( - f"homeassistant.components.tesla_fleet.VehicleSpecific.{func}", + f"tesla_fleet_api.tesla.VehicleFleet.{func}", return_value=COMMAND_OK, ) as command: await hass.services.async_call( @@ -85,7 +85,7 @@ async def test_press_signing_error( with ( patch("homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key"), patch( - "homeassistant.components.tesla_fleet.VehicleSigned.flash_lights", + "tesla_fleet_api.tesla.VehicleSigned.flash_lights", side_effect=NotOnWhitelistFault, ), pytest.raises(HomeAssistantError) as error, diff --git a/tests/components/tesla_fleet/test_climate.py b/tests/components/tesla_fleet/test_climate.py index b45e5259a5c..fae79c795c2 100644 --- a/tests/components/tesla_fleet/test_climate.py +++ 
b/tests/components/tesla_fleet/test_climate.py @@ -257,7 +257,7 @@ async def test_invalid_error( with ( patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", side_effect=InvalidCommand, ) as mock_on, pytest.raises( @@ -285,7 +285,7 @@ async def test_errors( with ( patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", return_value=response, ) as mock_on, pytest.raises(HomeAssistantError), @@ -308,7 +308,7 @@ async def test_ignored_error( await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) entity_id = "climate.test_climate" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", return_value=COMMAND_IGNORED_REASON, ) as mock_on: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_cover.py b/tests/components/tesla_fleet/test_cover.py index ac5307b2fdd..15d14f34a87 100644 --- a/tests/components/tesla_fleet/test_cover.py +++ b/tests/components/tesla_fleet/test_cover.py @@ -89,7 +89,7 @@ async def test_cover_services( # Vent Windows entity_id = "cover.test_windows" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.window_control", + "tesla_fleet_api.tesla.VehicleFleet.window_control", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -118,7 +118,7 @@ async def test_cover_services( # Charge Port Door entity_id = "cover.test_charge_port_door" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_open", + "tesla_fleet_api.tesla.VehicleFleet.charge_port_door_open", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -133,7 +133,7 @@ async def test_cover_services( assert state.state == CoverState.OPEN with patch( - "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_close", + "tesla_fleet_api.tesla.VehicleFleet.charge_port_door_close", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -150,7 +150,7 @@ async def test_cover_services( # Frunk entity_id = "cover.test_frunk" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.actuate_trunk", + "tesla_fleet_api.tesla.VehicleFleet.actuate_trunk", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -167,7 +167,7 @@ async def test_cover_services( # Trunk entity_id = "cover.test_trunk" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.actuate_trunk", + "tesla_fleet_api.tesla.VehicleFleet.actuate_trunk", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -196,7 +196,7 @@ async def test_cover_services( # Sunroof entity_id = "cover.test_sunroof" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.sun_roof_control", + "tesla_fleet_api.tesla.VehicleFleet.sun_roof_control", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_lock.py b/tests/components/tesla_fleet/test_lock.py index 00b77aefcaf..ac9a7b49b55 100644 --- a/tests/components/tesla_fleet/test_lock.py +++ b/tests/components/tesla_fleet/test_lock.py @@ -59,7 +59,7 @@ async def test_lock_services( entity_id = "lock.test_lock" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.door_lock", + "tesla_fleet_api.tesla.VehicleFleet.door_lock", return_value=COMMAND_OK, ) as call: await 
hass.services.async_call( @@ -73,7 +73,7 @@ async def test_lock_services( call.assert_called_once() with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.door_unlock", + "tesla_fleet_api.tesla.VehicleFleet.door_unlock", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -97,7 +97,7 @@ async def test_lock_services( ) with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.charge_port_door_open", + "tesla_fleet_api.tesla.VehicleFleet.charge_port_door_open", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_media_player.py b/tests/components/tesla_fleet/test_media_player.py index 4c833e7499f..b2900d96c80 100644 --- a/tests/components/tesla_fleet/test_media_player.py +++ b/tests/components/tesla_fleet/test_media_player.py @@ -88,7 +88,7 @@ async def test_media_player_services( entity_id = "media_player.test_media_player" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.adjust_volume", + "tesla_fleet_api.tesla.VehicleFleet.adjust_volume", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -102,7 +102,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_toggle_playback", + "tesla_fleet_api.tesla.VehicleFleet.media_toggle_playback", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -117,7 +117,7 @@ async def test_media_player_services( # This test will fail without the previous call to pause playback with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_toggle_playback", + "tesla_fleet_api.tesla.VehicleFleet.media_toggle_playback", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -131,7 +131,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_next_track", + "tesla_fleet_api.tesla.VehicleFleet.media_next_track", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -144,7 +144,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_prev_track", + "tesla_fleet_api.tesla.VehicleFleet.media_prev_track", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_number.py b/tests/components/tesla_fleet/test_number.py index 8551a99ee29..4ade98852c8 100644 --- a/tests/components/tesla_fleet/test_number.py +++ b/tests/components/tesla_fleet/test_number.py @@ -57,7 +57,7 @@ async def test_number_services( entity_id = "number.test_charge_current" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.set_charging_amps", + "tesla_fleet_api.tesla.VehicleFleet.set_charging_amps", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -72,7 +72,7 @@ async def test_number_services( entity_id = "number.test_charge_limit" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.set_charge_limit", + "tesla_fleet_api.tesla.VehicleFleet.set_charge_limit", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -87,7 +87,7 @@ async def test_number_services( entity_id = "number.energy_site_backup_reserve" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.backup", + "tesla_fleet_api.tesla.EnergySite.backup", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -105,7 +105,7 @@ async def 
test_number_services( entity_id = "number.energy_site_off_grid_reserve" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.off_grid_vehicle_charging_reserve", + "tesla_fleet_api.tesla.EnergySite.off_grid_vehicle_charging_reserve", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_select.py b/tests/components/tesla_fleet/test_select.py index 902b28ddb7a..f06d67041c9 100644 --- a/tests/components/tesla_fleet/test_select.py +++ b/tests/components/tesla_fleet/test_select.py @@ -61,11 +61,11 @@ async def test_select_services( entity_id = "select.test_seat_heater_front_left" with ( patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.remote_seat_heater_request", + "tesla_fleet_api.tesla.VehicleFleet.remote_seat_heater_request", return_value=COMMAND_OK, ) as remote_seat_heater_request, patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", return_value=COMMAND_OK, ) as auto_conditioning_start, ): @@ -83,11 +83,11 @@ async def test_select_services( entity_id = "select.test_steering_wheel_heater" with ( patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.remote_steering_wheel_heat_level_request", + "tesla_fleet_api.tesla.VehicleFleet.remote_steering_wheel_heat_level_request", return_value=COMMAND_OK, ) as remote_steering_wheel_heat_level_request, patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", return_value=COMMAND_OK, ) as auto_conditioning_start, ): @@ -104,7 +104,7 @@ async def test_select_services( entity_id = "select.energy_site_operation_mode" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.operation", + "tesla_fleet_api.tesla.EnergySite.operation", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -122,7 +122,7 @@ async def test_select_services( entity_id = "select.energy_site_allow_export" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.grid_import_export", + "tesla_fleet_api.tesla.EnergySite.grid_import_export", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_switch.py b/tests/components/tesla_fleet/test_switch.py index fba4fc05cc4..022c3a0ab18 100644 --- a/tests/components/tesla_fleet/test_switch.py +++ b/tests/components/tesla_fleet/test_switch.py @@ -71,41 +71,41 @@ async def test_switch_offline( @pytest.mark.parametrize( ("name", "on", "off"), [ - ("test_charge", "VehicleSpecific.charge_start", "VehicleSpecific.charge_stop"), + ("test_charge", "VehicleFleet.charge_start", "VehicleFleet.charge_stop"), ( "test_auto_seat_climate_left", - "VehicleSpecific.remote_auto_seat_climate_request", - "VehicleSpecific.remote_auto_seat_climate_request", + "VehicleFleet.remote_auto_seat_climate_request", + "VehicleFleet.remote_auto_seat_climate_request", ), ( "test_auto_seat_climate_right", - "VehicleSpecific.remote_auto_seat_climate_request", - "VehicleSpecific.remote_auto_seat_climate_request", + "VehicleFleet.remote_auto_seat_climate_request", + "VehicleFleet.remote_auto_seat_climate_request", ), ( "test_auto_steering_wheel_heater", - "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", - "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", + "VehicleFleet.remote_auto_steering_wheel_heat_climate_request", + 
"VehicleFleet.remote_auto_steering_wheel_heat_climate_request", ), ( "test_defrost", - "VehicleSpecific.set_preconditioning_max", - "VehicleSpecific.set_preconditioning_max", + "VehicleFleet.set_preconditioning_max", + "VehicleFleet.set_preconditioning_max", ), ( "energy_site_storm_watch", - "EnergySpecific.storm_mode", - "EnergySpecific.storm_mode", + "EnergySite.storm_mode", + "EnergySite.storm_mode", ), ( "energy_site_allow_charging_from_grid", - "EnergySpecific.grid_import_export", - "EnergySpecific.grid_import_export", + "EnergySite.grid_import_export", + "EnergySite.grid_import_export", ), ( "test_sentry_mode", - "VehicleSpecific.set_sentry_mode", - "VehicleSpecific.set_sentry_mode", + "VehicleFleet.set_sentry_mode", + "VehicleFleet.set_sentry_mode", ), ], ) @@ -122,7 +122,7 @@ async def test_switch_services( entity_id = f"switch.{name}" with patch( - f"homeassistant.components.tesla_fleet.{on}", + f"tesla_fleet_api.tesla.{on}", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -136,7 +136,7 @@ async def test_switch_services( call.assert_called_once() with patch( - f"homeassistant.components.tesla_fleet.{off}", + f"tesla_fleet_api.tesla.{off}", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/conftest.py b/tests/components/teslemetry/conftest.py index e89bab9eff1..0152543e512 100644 --- a/tests/components/teslemetry/conftest.py +++ b/tests/components/teslemetry/conftest.py @@ -25,7 +25,7 @@ from .const import ( def mock_metadata(): """Mock Tesla Fleet Api metadata method.""" with patch( - "homeassistant.components.teslemetry.Teslemetry.metadata", return_value=METADATA + "tesla_fleet_api.teslemetry.Teslemetry.metadata", return_value=METADATA ) as mock_products: yield mock_products @@ -34,7 +34,7 @@ def mock_metadata(): def mock_products(): """Mock Tesla Fleet Api products method.""" with patch( - "homeassistant.components.teslemetry.Teslemetry.products", return_value=PRODUCTS + "tesla_fleet_api.teslemetry.Teslemetry.products", return_value=PRODUCTS ) as mock_products: yield mock_products @@ -43,7 +43,7 @@ def mock_products(): def mock_vehicle_data() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle_data method.""" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.vehicle_data", + "tesla_fleet_api.teslemetry.Vehicle.vehicle_data", return_value=VEHICLE_DATA, ) as mock_vehicle_data: yield mock_vehicle_data @@ -53,7 +53,7 @@ def mock_vehicle_data() -> Generator[AsyncMock]: def mock_legacy(): """Mock Tesla Fleet Api products method.""" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.pre2021", return_value=True + "tesla_fleet_api.teslemetry.Vehicle.pre2021", return_value=True ) as mock_pre2021: yield mock_pre2021 @@ -62,7 +62,7 @@ def mock_legacy(): def mock_wake_up(): """Mock Tesla Fleet API Vehicle Specific wake_up method.""" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.wake_up", + "tesla_fleet_api.teslemetry.Vehicle.wake_up", return_value=WAKE_UP_ONLINE, ) as mock_wake_up: yield mock_wake_up @@ -72,7 +72,7 @@ def mock_wake_up(): def mock_vehicle() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle method.""" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.vehicle", + "tesla_fleet_api.teslemetry.Vehicle.vehicle", return_value=WAKE_UP_ONLINE, ) as mock_vehicle: yield mock_vehicle @@ -82,7 +82,7 @@ def mock_vehicle() -> Generator[AsyncMock]: def mock_request(): """Mock Tesla Fleet API 
Vehicle Specific class.""" with patch( - "homeassistant.components.teslemetry.Teslemetry._request", + "tesla_fleet_api.teslemetry.Teslemetry._request", return_value=COMMAND_OK, ) as mock_request: yield mock_request @@ -92,7 +92,7 @@ def mock_request(): def mock_live_status(): """Mock Teslemetry Energy Specific live_status method.""" with patch( - "homeassistant.components.teslemetry.EnergySpecific.live_status", + "tesla_fleet_api.tesla.energysite.EnergySite.live_status", side_effect=lambda: deepcopy(LIVE_STATUS), ) as mock_live_status: yield mock_live_status @@ -102,7 +102,7 @@ def mock_live_status(): def mock_site_info(): """Mock Teslemetry Energy Specific site_info method.""" with patch( - "homeassistant.components.teslemetry.EnergySpecific.site_info", + "tesla_fleet_api.tesla.energysite.EnergySite.site_info", side_effect=lambda: deepcopy(SITE_INFO), ) as mock_live_status: yield mock_live_status @@ -112,7 +112,7 @@ def mock_site_info(): def mock_energy_history(): """Mock Teslemetry Energy Specific site_info method.""" with patch( - "homeassistant.components.teslemetry.EnergySpecific.energy_history", + "tesla_fleet_api.tesla.energysite.EnergySite.energy_history", return_value=ENERGY_HISTORY, ) as mock_live_status: yield mock_live_status @@ -122,7 +122,7 @@ def mock_energy_history(): def mock_add_listener(): """Mock Teslemetry Stream listen method.""" with patch( - "homeassistant.components.teslemetry.TeslemetryStream.async_add_listener", + "teslemetry_stream.TeslemetryStream.async_add_listener", ) as mock_add_listener: mock_add_listener.listeners = [] @@ -165,7 +165,7 @@ def mock_stream_update_config(): def mock_stream_connected(): """Mock Teslemetry Stream listen method.""" with patch( - "homeassistant.components.teslemetry.TeslemetryStream.connected", + "teslemetry_stream.TeslemetryStream.connected", return_value=True, ) as mock_stream_connected: yield mock_stream_connected diff --git a/tests/components/teslemetry/snapshots/test_climate.ambr b/tests/components/teslemetry/snapshots/test_climate.ambr index 4c265c00cb8..e0e68f23c79 100644 --- a/tests/components/teslemetry/snapshots/test_climate.ambr +++ b/tests/components/teslemetry/snapshots/test_climate.ambr @@ -1,10 +1,4 @@ # serializer version: 1 -# name: test_asleep_or_offline[HomeAssistantError] - 'Timed out trying to wake up vehicle' -# --- -# name: test_asleep_or_offline[InvalidCommand] - 'Failed to wake up vehicle: The data request or command is unknown.' 
-# --- # name: test_climate[climate.test_cabin_overheat_protection-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -78,6 +72,10 @@ }), 'area_id': None, 'capabilities': dict({ + 'fan_modes': list([ + 'off', + 'bioweapon', + ]), 'hvac_modes': list([ , , @@ -113,7 +111,7 @@ 'original_name': 'Climate', 'platform': 'teslemetry', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': , 'unique_id': 'LRW3F7EK4NC700000-driver_temp', 'unit_of_measurement': None, @@ -123,6 +121,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'current_temperature': 30.0, + 'fan_mode': 'off', + 'fan_modes': list([ + 'off', + 'bioweapon', + ]), 'friendly_name': 'Test Climate', 'hvac_modes': list([ , @@ -137,7 +140,7 @@ 'dog', 'camp', ]), - 'supported_features': , + 'supported_features': , 'temperature': 22.0, }), 'context': , @@ -220,6 +223,10 @@ }), 'area_id': None, 'capabilities': dict({ + 'fan_modes': list([ + 'off', + 'bioweapon', + ]), 'hvac_modes': list([ , , @@ -255,7 +262,7 @@ 'original_name': 'Climate', 'platform': 'teslemetry', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': , 'unique_id': 'LRW3F7EK4NC700000-driver_temp', 'unit_of_measurement': None, @@ -265,6 +272,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'current_temperature': 30.0, + 'fan_mode': 'off', + 'fan_modes': list([ + 'off', + 'bioweapon', + ]), 'friendly_name': 'Test Climate', 'hvac_modes': list([ , @@ -279,7 +291,7 @@ 'dog', 'camp', ]), - 'supported_features': , + 'supported_features': , 'temperature': 22.0, }), 'context': , @@ -297,7 +309,9 @@ 'area_id': None, 'capabilities': dict({ 'hvac_modes': list([ + , , + , ]), 'max_temp': 40, 'min_temp': 30, @@ -339,6 +353,7 @@ 'capabilities': dict({ 'hvac_modes': list([ , + , ]), 'max_temp': 28.0, 'min_temp': 15.0, @@ -374,3 +389,85 @@ # name: test_invalid_error[error] 'Command returned exception: The data request or command is unknown.' 
# --- +# name: test_select_streaming[climate.test_cabin_overheat_protection] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Test Cabin overheat protection', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'supported_features': , + 'target_temp_step': 5, + 'temperature': None, + }), + 'context': , + 'entity_id': 'climate.test_cabin_overheat_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- +# name: test_select_streaming[climate.test_climate LHD] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 26.0, + 'friendly_name': 'Test Climate', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_mode': None, + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + 'supported_features': , + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.test_climate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat_cool', + }) +# --- +# name: test_select_streaming[climate.test_climate] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 26.0, + 'friendly_name': 'Test Climate', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_mode': None, + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + 'supported_features': , + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.test_climate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat_cool', + }) +# --- diff --git a/tests/components/teslemetry/test_button.py b/tests/components/teslemetry/test_button.py index 75f94342f1e..46db33ce913 100644 --- a/tests/components/teslemetry/test_button.py +++ b/tests/components/teslemetry/test_button.py @@ -42,7 +42,7 @@ async def test_press(hass: HomeAssistant, name: str, func: str) -> None: await setup_platform(hass, [Platform.BUTTON]) with patch( - f"homeassistant.components.teslemetry.VehicleSpecific.{func}", + f"tesla_fleet_api.teslemetry.Vehicle.{func}", return_value=COMMAND_OK, ) as command: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_climate.py b/tests/components/teslemetry/test_climate.py index 33f2e134806..27bed45c51f 100644 --- a/tests/components/teslemetry/test_climate.py +++ b/tests/components/teslemetry/test_climate.py @@ -2,10 +2,10 @@ from unittest.mock import AsyncMock, patch -from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.exceptions import InvalidCommand +from teslemetry_stream import Signal from homeassistant.components.climate import ( ATTR_HVAC_MODE, @@ -24,15 +24,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er -from . import assert_entities, setup_platform +from . 
import assert_entities, reload_platform, setup_platform from .const import ( COMMAND_ERRORS, COMMAND_IGNORED_REASON, METADATA_NOSCOPE, VEHICLE_DATA_ALT, - VEHICLE_DATA_ASLEEP, - WAKE_UP_ASLEEP, - WAKE_UP_ONLINE, ) @@ -41,6 +38,7 @@ async def test_climate( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, + mock_legacy: AsyncMock, ) -> None: """Tests that the climate entity is correct.""" @@ -195,6 +193,7 @@ async def test_climate_alt( snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_vehicle_data: AsyncMock, + mock_legacy: AsyncMock, ) -> None: """Tests that the climate entity is correct.""" @@ -211,7 +210,7 @@ async def test_invalid_error(hass: HomeAssistant, snapshot: SnapshotAssertion) - with ( patch( - "homeassistant.components.teslemetry.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.teslemetry.Vehicle.auto_conditioning_start", side_effect=InvalidCommand, ) as mock_on, pytest.raises(HomeAssistantError) as error, @@ -235,7 +234,7 @@ async def test_errors(hass: HomeAssistant, response: str) -> None: with ( patch( - "homeassistant.components.teslemetry.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.teslemetry.Vehicle.auto_conditioning_start", return_value=response, ) as mock_on, pytest.raises(HomeAssistantError), @@ -257,7 +256,7 @@ async def test_ignored_error( await setup_platform(hass, [Platform.CLIMATE]) entity_id = "climate.test_climate" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.teslemetry.Vehicle.auto_conditioning_start", return_value=COMMAND_IGNORED_REASON, ) as mock_on: await hass.services.async_call( @@ -269,71 +268,12 @@ async def test_ignored_error( mock_on.assert_called_once() -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_asleep_or_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, - mock_wake_up: AsyncMock, - mock_vehicle: AsyncMock, - freezer: FrozenDateTimeFactory, - snapshot: SnapshotAssertion, -) -> None: - """Tests asleep is handled.""" - - mock_vehicle_data.return_value = VEHICLE_DATA_ASLEEP - await setup_platform(hass, [Platform.CLIMATE]) - entity_id = "climate.test_climate" - - # Run a command but fail trying to wake up the vehicle - mock_wake_up.side_effect = InvalidCommand - with pytest.raises(HomeAssistantError) as error: - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - assert str(error.value) == snapshot(name="InvalidCommand") - mock_wake_up.assert_called_once() - - mock_wake_up.side_effect = None - mock_wake_up.reset_mock() - - # Run a command but timeout trying to wake up the vehicle - mock_wake_up.return_value = WAKE_UP_ASLEEP - mock_vehicle.return_value = WAKE_UP_ASLEEP - with ( - patch("homeassistant.components.teslemetry.helpers.asyncio.sleep"), - pytest.raises(HomeAssistantError) as error, - ): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - assert str(error.value) == snapshot(name="HomeAssistantError") - mock_wake_up.assert_called_once() - mock_vehicle.assert_called() - - mock_wake_up.reset_mock() - mock_vehicle.reset_mock() - mock_wake_up.return_value = WAKE_UP_ONLINE - mock_vehicle.return_value = WAKE_UP_ONLINE - - # Run a command and wake up the vehicle immediately - await hass.services.async_call( - CLIMATE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: [entity_id]}, blocking=True - ) - await 
hass.async_block_till_done() - mock_wake_up.assert_called_once() - - async def test_climate_noscope( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_metadata: AsyncMock, + mock_legacy: AsyncMock, ) -> None: """Tests that the climate entity is correct.""" mock_metadata.return_value = METADATA_NOSCOPE @@ -363,3 +303,47 @@ async def test_climate_noscope( {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 20}, blocking=True, ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_select_streaming( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_vehicle_data: AsyncMock, + mock_add_listener: AsyncMock, +) -> None: + """Tests that the select entities with streaming are correct.""" + + entry = await setup_platform(hass, [Platform.CLIMATE]) + + # Stream update + mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.INSIDE_TEMP: 26, + Signal.HVAC_AC_ENABLED: True, + Signal.CLIMATE_KEEPER_MODE: "ClimateKeeperModeOn", + Signal.RIGHT_HAND_DRIVE: True, + Signal.HVAC_LEFT_TEMPERATURE_REQUEST: 22, + Signal.HVAC_RIGHT_TEMPERATURE_REQUEST: 21, + Signal.CABIN_OVERHEAT_PROTECTION_MODE: "CabinOverheatProtectionModeStateOn", + Signal.CABIN_OVERHEAT_PROTECTION_TEMPERATURE_LIMIT: 35, + }, + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + + assert hass.states.get("climate.test_climate") == snapshot( + name="climate.test_climate LHD" + ) + + await reload_platform(hass, entry, [Platform.CLIMATE]) + + # Assert the entities restored their values + for entity_id in ( + "climate.test_climate", + "climate.test_cabin_overheat_protection", + ): + assert hass.states.get(entity_id) == snapshot(name=entity_id) diff --git a/tests/components/teslemetry/test_cover.py b/tests/components/teslemetry/test_cover.py index 14af1e732fe..e3933931c9f 100644 --- a/tests/components/teslemetry/test_cover.py +++ b/tests/components/teslemetry/test_cover.py @@ -75,7 +75,7 @@ async def test_cover_services( # Vent Windows entity_id = "cover.test_windows" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.window_control", + "tesla_fleet_api.teslemetry.Vehicle.window_control", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -104,7 +104,7 @@ async def test_cover_services( # Charge Port Door entity_id = "cover.test_charge_port_door" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_open", + "tesla_fleet_api.teslemetry.Vehicle.charge_port_door_open", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -119,7 +119,7 @@ async def test_cover_services( assert state.state == CoverState.OPEN with patch( - "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_close", + "tesla_fleet_api.teslemetry.Vehicle.charge_port_door_close", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -136,7 +136,7 @@ async def test_cover_services( # Frunk entity_id = "cover.test_frunk" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.actuate_trunk", + "tesla_fleet_api.teslemetry.Vehicle.actuate_trunk", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -153,7 +153,7 @@ async def test_cover_services( # Trunk entity_id = "cover.test_trunk" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.actuate_trunk", + "tesla_fleet_api.teslemetry.Vehicle.actuate_trunk", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -182,7 +182,7 
@@ async def test_cover_services( # Sunroof entity_id = "cover.test_sunroof" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.sun_roof_control", + "tesla_fleet_api.teslemetry.Vehicle.sun_roof_control", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_init.py b/tests/components/teslemetry/test_init.py index 5481e6cc034..fcf9c76c939 100644 --- a/tests/components/teslemetry/test_init.py +++ b/tests/components/teslemetry/test_init.py @@ -2,17 +2,14 @@ from unittest.mock import AsyncMock -from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.exceptions import ( InvalidToken, SubscriptionRequired, TeslaFleetError, - VehicleOffline, ) -from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL from homeassistant.components.teslemetry.models import TeslemetryData from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_OFF, STATE_ON, Platform @@ -22,8 +19,6 @@ from homeassistant.helpers import device_registry as dr from . import setup_platform from .const import VEHICLE_DATA_ALT -from tests.common import async_fire_time_changed - ERRORS = [ (InvalidToken, ConfigEntryState.SETUP_ERROR), (SubscriptionRequired, ConfigEntryState.SETUP_ERROR), @@ -69,22 +64,6 @@ async def test_devices( assert device == snapshot(name=f"{device.identifiers}") -async def test_vehicle_refresh_offline( - hass: HomeAssistant, mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory -) -> None: - """Test coordinator refresh with an error.""" - entry = await setup_platform(hass, [Platform.CLIMATE]) - assert entry.state is ConfigEntryState.LOADED - mock_vehicle_data.assert_called_once() - mock_vehicle_data.reset_mock() - - mock_vehicle_data.side_effect = VehicleOffline - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - mock_vehicle_data.assert_called_once() - - @pytest.mark.parametrize(("side_effect", "state"), ERRORS) async def test_vehicle_refresh_error( hass: HomeAssistant, diff --git a/tests/components/teslemetry/test_lock.py b/tests/components/teslemetry/test_lock.py index 848eee82c39..a74d613859f 100644 --- a/tests/components/teslemetry/test_lock.py +++ b/tests/components/teslemetry/test_lock.py @@ -57,7 +57,7 @@ async def test_lock_services( entity_id = "lock.test_lock" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.door_lock", + "tesla_fleet_api.teslemetry.Vehicle.door_lock", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -71,7 +71,7 @@ async def test_lock_services( call.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.door_unlock", + "tesla_fleet_api.teslemetry.Vehicle.door_unlock", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -95,7 +95,7 @@ async def test_lock_services( ) with patch( - "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_open", + "tesla_fleet_api.teslemetry.Vehicle.charge_port_door_open", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_media_player.py b/tests/components/teslemetry/test_media_player.py index de990dbe7bc..ab8f21ceda4 100644 --- a/tests/components/teslemetry/test_media_player.py +++ b/tests/components/teslemetry/test_media_player.py @@ -76,7 +76,7 @@ async def test_media_player_services( entity_id = "media_player.test_media_player" 
with patch( - "homeassistant.components.teslemetry.VehicleSpecific.adjust_volume", + "tesla_fleet_api.teslemetry.Vehicle.adjust_volume", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -90,7 +90,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.media_toggle_playback", + "tesla_fleet_api.teslemetry.Vehicle.media_toggle_playback", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -105,7 +105,7 @@ async def test_media_player_services( # This test will fail without the previous call to pause playback with patch( - "homeassistant.components.teslemetry.VehicleSpecific.media_toggle_playback", + "tesla_fleet_api.teslemetry.Vehicle.media_toggle_playback", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -119,7 +119,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.media_next_track", + "tesla_fleet_api.teslemetry.Vehicle.media_next_track", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -132,7 +132,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.media_prev_track", + "tesla_fleet_api.teslemetry.Vehicle.media_prev_track", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_number.py b/tests/components/teslemetry/test_number.py index 95eed5a3f1e..2c45631a060 100644 --- a/tests/components/teslemetry/test_number.py +++ b/tests/components/teslemetry/test_number.py @@ -42,7 +42,7 @@ async def test_number_services( entity_id = "number.test_charge_current" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_charging_amps", + "tesla_fleet_api.teslemetry.Vehicle.set_charging_amps", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -57,7 +57,7 @@ async def test_number_services( entity_id = "number.test_charge_limit" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_charge_limit", + "tesla_fleet_api.teslemetry.Vehicle.set_charge_limit", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -72,7 +72,7 @@ async def test_number_services( entity_id = "number.energy_site_backup_reserve" with patch( - "homeassistant.components.teslemetry.EnergySpecific.backup", + "tesla_fleet_api.teslemetry.EnergySite.backup", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -90,7 +90,7 @@ async def test_number_services( entity_id = "number.energy_site_off_grid_reserve" with patch( - "homeassistant.components.teslemetry.EnergySpecific.off_grid_vehicle_charging_reserve", + "tesla_fleet_api.teslemetry.EnergySite.off_grid_vehicle_charging_reserve", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_select.py b/tests/components/teslemetry/test_select.py index c49e83803cd..b17b52903fa 100644 --- a/tests/components/teslemetry/test_select.py +++ b/tests/components/teslemetry/test_select.py @@ -41,7 +41,7 @@ async def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: entity_id = "select.test_seat_heater_front_left" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.remote_seat_heater_request", + "tesla_fleet_api.teslemetry.Vehicle.remote_seat_heater_request", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -56,7 +56,7 @@ 
async def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: entity_id = "select.test_steering_wheel_heater" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.remote_steering_wheel_heat_level_request", + "tesla_fleet_api.teslemetry.Vehicle.remote_steering_wheel_heat_level_request", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -71,7 +71,7 @@ async def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: entity_id = "select.energy_site_operation_mode" with patch( - "homeassistant.components.teslemetry.EnergySpecific.operation", + "tesla_fleet_api.teslemetry.EnergySite.operation", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -89,7 +89,7 @@ async def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: entity_id = "select.energy_site_allow_export" with patch( - "homeassistant.components.teslemetry.EnergySpecific.grid_import_export", + "tesla_fleet_api.teslemetry.EnergySite.grid_import_export", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_sensor.py b/tests/components/teslemetry/test_sensor.py index c3c2252ab89..213811f6ea0 100644 --- a/tests/components/teslemetry/test_sensor.py +++ b/tests/components/teslemetry/test_sensor.py @@ -31,9 +31,7 @@ async def test_sensors( freezer.move_to("2024-01-01 00:00:00+00:00") # Force the vehicle to use polling - with patch( - "homeassistant.components.teslemetry.VehicleSpecific.pre2021", return_value=True - ): + with patch("tesla_fleet_api.teslemetry.Vehicle.pre2021", return_value=True): entry = await setup_platform(hass, [Platform.SENSOR]) assert_entities(hass, entry.entry_id, entity_registry, snapshot) diff --git a/tests/components/teslemetry/test_services.py b/tests/components/teslemetry/test_services.py index a5b55f5dcc5..bcf5407999f 100644 --- a/tests/components/teslemetry/test_services.py +++ b/tests/components/teslemetry/test_services.py @@ -51,7 +51,7 @@ async def test_services( ).device_id with patch( - "homeassistant.components.teslemetry.VehicleSpecific.navigation_gps_request", + "tesla_fleet_api.teslemetry.Vehicle.navigation_gps_request", return_value=COMMAND_OK, ) as navigation_gps_request: await hass.services.async_call( @@ -66,7 +66,7 @@ async def test_services( navigation_gps_request.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_scheduled_charging", + "tesla_fleet_api.teslemetry.Vehicle.set_scheduled_charging", return_value=COMMAND_OK, ) as set_scheduled_charging: await hass.services.async_call( @@ -93,7 +93,7 @@ async def test_services( ) with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_scheduled_departure", + "tesla_fleet_api.teslemetry.Vehicle.set_scheduled_departure", return_value=COMMAND_OK, ) as set_scheduled_departure: await hass.services.async_call( @@ -138,7 +138,7 @@ async def test_services( ) with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_valet_mode", + "tesla_fleet_api.teslemetry.Vehicle.set_valet_mode", return_value=COMMAND_OK, ) as set_valet_mode: await hass.services.async_call( @@ -154,7 +154,7 @@ async def test_services( set_valet_mode.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.speed_limit_activate", + "tesla_fleet_api.teslemetry.Vehicle.speed_limit_activate", return_value=COMMAND_OK, ) as speed_limit_activate: await hass.services.async_call( @@ -170,7 +170,7 @@ async def test_services( 
speed_limit_activate.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.speed_limit_deactivate", + "tesla_fleet_api.teslemetry.Vehicle.speed_limit_deactivate", return_value=COMMAND_OK, ) as speed_limit_deactivate: await hass.services.async_call( @@ -186,7 +186,7 @@ async def test_services( speed_limit_deactivate.assert_called_once() with patch( - "homeassistant.components.teslemetry.EnergySpecific.time_of_use_settings", + "tesla_fleet_api.teslemetry.EnergySite.time_of_use_settings", return_value=COMMAND_OK, ) as set_time_of_use: await hass.services.async_call( @@ -202,7 +202,7 @@ async def test_services( with ( patch( - "homeassistant.components.teslemetry.EnergySpecific.time_of_use_settings", + "tesla_fleet_api.teslemetry.EnergySite.time_of_use_settings", return_value=COMMAND_ERROR, ) as set_time_of_use, pytest.raises(HomeAssistantError), diff --git a/tests/components/teslemetry/test_switch.py b/tests/components/teslemetry/test_switch.py index 17522f0ce2a..6b31a28db59 100644 --- a/tests/components/teslemetry/test_switch.py +++ b/tests/components/teslemetry/test_switch.py @@ -49,41 +49,41 @@ async def test_switch_alt( @pytest.mark.parametrize( ("name", "on", "off"), [ - ("test_charge", "VehicleSpecific.charge_start", "VehicleSpecific.charge_stop"), + ("test_charge", "Vehicle.charge_start", "Vehicle.charge_stop"), ( "test_auto_seat_climate_left", - "VehicleSpecific.remote_auto_seat_climate_request", - "VehicleSpecific.remote_auto_seat_climate_request", + "Vehicle.remote_auto_seat_climate_request", + "Vehicle.remote_auto_seat_climate_request", ), ( "test_auto_seat_climate_right", - "VehicleSpecific.remote_auto_seat_climate_request", - "VehicleSpecific.remote_auto_seat_climate_request", + "Vehicle.remote_auto_seat_climate_request", + "Vehicle.remote_auto_seat_climate_request", ), ( "test_auto_steering_wheel_heater", - "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", - "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", + "Vehicle.remote_auto_steering_wheel_heat_climate_request", + "Vehicle.remote_auto_steering_wheel_heat_climate_request", ), ( "test_defrost", - "VehicleSpecific.set_preconditioning_max", - "VehicleSpecific.set_preconditioning_max", + "Vehicle.set_preconditioning_max", + "Vehicle.set_preconditioning_max", ), ( "energy_site_storm_watch", - "EnergySpecific.storm_mode", - "EnergySpecific.storm_mode", + "EnergySite.storm_mode", + "EnergySite.storm_mode", ), ( "energy_site_allow_charging_from_grid", - "EnergySpecific.grid_import_export", - "EnergySpecific.grid_import_export", + "EnergySite.grid_import_export", + "EnergySite.grid_import_export", ), ( "test_sentry_mode", - "VehicleSpecific.set_sentry_mode", - "VehicleSpecific.set_sentry_mode", + "Vehicle.set_sentry_mode", + "Vehicle.set_sentry_mode", ), ], ) @@ -96,7 +96,7 @@ async def test_switch_services( entity_id = f"switch.{name}" with patch( - f"homeassistant.components.teslemetry.{on}", + f"tesla_fleet_api.teslemetry.{on}", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -110,7 +110,7 @@ async def test_switch_services( call.assert_called_once() with patch( - f"homeassistant.components.teslemetry.{off}", + f"tesla_fleet_api.teslemetry.{off}", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_update.py b/tests/components/teslemetry/test_update.py index 0f26b162043..af6c9d847f1 100644 --- a/tests/components/teslemetry/test_update.py +++ 
b/tests/components/teslemetry/test_update.py @@ -61,7 +61,7 @@ async def test_update_services( entity_id = "update.test_update" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.schedule_software_update", + "tesla_fleet_api.teslemetry.Vehicle.schedule_software_update", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tessie/conftest.py b/tests/components/tessie/conftest.py index e0aba73af17..5fb844ff6b4 100644 --- a/tests/components/tessie/conftest.py +++ b/tests/components/tessie/conftest.py @@ -85,7 +85,7 @@ def mock_request(): def mock_live_status(): """Mock Tesla Fleet API EnergySpecific live_status method.""" with patch( - "homeassistant.components.tessie.EnergySpecific.live_status", + "tesla_fleet_api.tessie.EnergySite.live_status", side_effect=lambda: deepcopy(LIVE_STATUS), ) as mock_live_status: yield mock_live_status @@ -95,7 +95,7 @@ def mock_live_status(): def mock_site_info(): """Mock Tesla Fleet API EnergySpecific site_info method.""" with patch( - "homeassistant.components.tessie.EnergySpecific.site_info", + "tesla_fleet_api.tessie.EnergySite.site_info", side_effect=lambda: deepcopy(SITE_INFO), ) as mock_live_status: yield mock_live_status diff --git a/tests/components/tessie/test_number.py b/tests/components/tessie/test_number.py index 0fb13779183..69bbe1c9087 100644 --- a/tests/components/tessie/test_number.py +++ b/tests/components/tessie/test_number.py @@ -67,7 +67,7 @@ async def test_numbers( entity_id = "number.energy_site_backup_reserve" with patch( - "homeassistant.components.teslemetry.EnergySpecific.backup", + "tesla_fleet_api.tessie.EnergySite.backup", return_value=TEST_RESPONSE, ) as call: await hass.services.async_call( @@ -85,7 +85,7 @@ async def test_numbers( entity_id = "number.energy_site_off_grid_reserve" with patch( - "homeassistant.components.teslemetry.EnergySpecific.off_grid_vehicle_charging_reserve", + "tesla_fleet_api.tessie.EnergySite.off_grid_vehicle_charging_reserve", return_value=TEST_RESPONSE, ) as call: await hass.services.async_call( diff --git a/tests/components/tessie/test_select.py b/tests/components/tessie/test_select.py index c78923fbf5b..64380d363fc 100644 --- a/tests/components/tessie/test_select.py +++ b/tests/components/tessie/test_select.py @@ -52,7 +52,7 @@ async def test_select( # Test site operation mode entity_id = "select.energy_site_operation_mode" with patch( - "homeassistant.components.teslemetry.EnergySpecific.operation", + "tesla_fleet_api.tessie.EnergySite.operation", return_value=TEST_RESPONSE, ) as call: await hass.services.async_call( @@ -71,7 +71,7 @@ async def test_select( # Test site export mode entity_id = "select.energy_site_allow_export" with patch( - "homeassistant.components.teslemetry.EnergySpecific.grid_import_export", + "tesla_fleet_api.tessie.EnergySite.grid_import_export", return_value=TEST_RESPONSE, ) as call: await hass.services.async_call( @@ -129,7 +129,7 @@ async def test_errors(hass: HomeAssistant) -> None: # Test changing energy select with unknown error with ( patch( - "homeassistant.components.tessie.EnergySpecific.operation", + "tesla_fleet_api.tessie.EnergySite.operation", side_effect=UnsupportedVehicle, ) as mock_set, pytest.raises(HomeAssistantError) as error, diff --git a/tests/components/tessie/test_switch.py b/tests/components/tessie/test_switch.py index 690ad7d1ab4..f58468edfb7 100644 --- a/tests/components/tessie/test_switch.py +++ b/tests/components/tessie/test_switch.py @@ -61,13 +61,13 @@ async def test_switches( [ ( 
"energy_site_storm_watch", - "EnergySpecific.storm_mode", - "EnergySpecific.storm_mode", + "storm_mode", + "storm_mode", ), ( "energy_site_allow_charging_from_grid", - "EnergySpecific.grid_import_export", - "EnergySpecific.grid_import_export", + "grid_import_export", + "grid_import_export", ), ], ) @@ -80,7 +80,7 @@ async def test_switch_services( entity_id = f"switch.{name}" with patch( - f"homeassistant.components.teslemetry.{on}", + f"tesla_fleet_api.tessie.EnergySite.{on}", return_value=RESPONSE_OK, ) as call: await hass.services.async_call( @@ -94,7 +94,7 @@ async def test_switch_services( call.assert_called_once() with patch( - f"homeassistant.components.teslemetry.{off}", + f"tesla_fleet_api.tessie.EnergySite.{off}", return_value=RESPONSE_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tractive/test_device_tracker.py b/tests/components/tractive/test_device_tracker.py index ff78173ef7b..ff9c7ca88ef 100644 --- a/tests/components/tractive/test_device_tracker.py +++ b/tests/components/tractive/test_device_tracker.py @@ -59,3 +59,31 @@ async def test_source_type_phone( hass.states.get("device_tracker.test_pet_tracker").attributes["source_type"] is SourceType.BLUETOOTH ) + + +async def test_source_type_gps( + hass: HomeAssistant, + mock_tractive_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test if the source type is GPS when the location sensor is KNOWN WIFI.""" + await init_integration(hass, mock_config_entry) + + mock_tractive_client.send_position_event( + mock_config_entry, + { + "tracker_id": "device_id_123", + "position": { + "latlong": [22.333, 44.555], + "accuracy": 99, + "sensor_used": "KNOWN_WIFI", + }, + }, + ) + mock_tractive_client.send_hardware_event(mock_config_entry) + await hass.async_block_till_done() + + assert ( + hass.states.get("device_tracker.test_pet_tracker").attributes["source_type"] + is SourceType.GPS + ) diff --git a/tests/components/usb/__init__.py b/tests/components/usb/__init__.py index 96d671d0958..6db0cea1ffe 100644 --- a/tests/components/usb/__init__.py +++ b/tests/components/usb/__init__.py @@ -1,44 +1,29 @@ """Tests for the USB Discovery integration.""" -from homeassistant.components.usb.models import USBDevice +from unittest.mock import patch -conbee_device = USBDevice( - device="/dev/cu.usbmodemDE24338801", - vid="1CF1", - pid="0030", - serial_number="DE2433880", - manufacturer="dresden elektronik ingenieurtechnik GmbH", - description="ConBee II", -) -slae_sh_device = USBDevice( - device="/dev/cu.usbserial-110", - vid="10C4", - pid="EA60", - serial_number="00_12_4B_00_22_98_88_7F", - manufacturer="Silicon Labs", - description="slae.sh cc2652rb stick - slaesh's iot stuff", -) -electro_lama_device = USBDevice( - device="/dev/cu.usbserial-110", - vid="1A86", - pid="7523", - serial_number=None, - manufacturer=None, - description="USB2.0-Serial", -) -skyconnect_macos_correct = USBDevice( - device="/dev/cu.SLAB_USBtoUART", - vid="10C4", - pid="EA60", - serial_number="9ab1da1ea4b3ed11956f4eaca7669f5d", - manufacturer="Nabu Casa", - description="SkyConnect v1.0", -) -skyconnect_macos_incorrect = USBDevice( - device="/dev/cu.usbserial-2110", - vid="10C4", - pid="EA60", - serial_number="9ab1da1ea4b3ed11956f4eaca7669f5d", - manufacturer="Nabu Casa", - description="SkyConnect v1.0", -) +from aiousbwatcher import InotifyNotAvailableError +import pytest + +from homeassistant.components.usb import async_request_scan as usb_async_request_scan +from homeassistant.core import HomeAssistant + + 
+@pytest.fixture(name="force_usb_polling_watcher") +def force_usb_polling_watcher(): + """Patch the USB integration to not use inotify and fall back to polling.""" + with patch( + "homeassistant.components.usb.AIOUSBWatcher.async_start", + side_effect=InotifyNotAvailableError, + ): + yield + + +def patch_scanned_serial_ports(**kwargs) -> None: + """Patch the USB integration's list of scanned serial ports.""" + return patch("homeassistant.components.usb.scan_serial_ports", **kwargs) + + +async def async_request_scan(hass: HomeAssistant) -> None: + """Request a USB scan.""" + return await usb_async_request_scan(hass) diff --git a/tests/components/usb/test_init.py b/tests/components/usb/test_init.py index 9730dba53d7..3a56e929b22 100644 --- a/tests/components/usb/test_init.py +++ b/tests/components/usb/test_init.py @@ -7,31 +7,40 @@ import os from typing import Any from unittest.mock import MagicMock, Mock, call, patch, sentinel -from aiousbwatcher import InotifyNotAvailableError import pytest from homeassistant.components import usb -from homeassistant.components.usb.utils import usb_device_from_port +from homeassistant.components.usb.models import USBDevice from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant from homeassistant.helpers.service_info.usb import UsbServiceInfo from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util -from . import conbee_device, slae_sh_device +from . import ( + force_usb_polling_watcher, # noqa: F401 + patch_scanned_serial_ports, +) from tests.common import async_fire_time_changed, import_and_test_deprecated_constant from tests.typing import WebSocketGenerator - -@pytest.fixture(name="aiousbwatcher_no_inotify") -def aiousbwatcher_no_inotify(): - """Patch AIOUSBWatcher to not use inotify.""" - with patch( - "homeassistant.components.usb.AIOUSBWatcher.async_start", - side_effect=InotifyNotAvailableError, - ): - yield +conbee_device = USBDevice( + device="/dev/cu.usbmodemDE24338801", + vid="1CF1", + pid="0030", + serial_number="DE2433880", + manufacturer="dresden elektronik ingenieurtechnik GmbH", + description="ConBee II", +) +slae_sh_device = USBDevice( + device="/dev/cu.usbserial-110", + vid="10C4", + pid="EA60", + serial_number="00_12_4B_00_22_98_88_7F", + manufacturer="Silicon Labs", + description="slae.sh cc2652rb stick - slaesh's iot stuff", +) async def test_aiousbwatcher_discovery( @@ -40,11 +49,11 @@ async def test_aiousbwatcher_discovery( """Test that aiousbwatcher can discover a device without raising an exception.""" new_usb = [{"domain": "test1", "vid": "3039"}, {"domain": "test2", "vid": "0FA0"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -63,7 +72,7 @@ async def test_aiousbwatcher_discovery( with ( patch("sys.platform", "linux"), patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch( "homeassistant.components.usb.AIOUSBWatcher", return_value=MockAIOUSBWatcher ), @@ -81,11 +90,11 @@ async def test_aiousbwatcher_discovery( await hass.async_block_till_done() assert len(mock_config_flow.mock_calls) == 1 - mock_comports.append( - MagicMock( + 
mock_ports.append( + USBDevice( device=slae_sh_device.device, - vid=4000, - pid=4000, + vid="0FA0", + pid="0FA0", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -107,7 +116,7 @@ async def test_aiousbwatcher_discovery( await hass.async_block_till_done() -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_polling_discovery( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -115,19 +124,19 @@ async def test_polling_discovery( new_usb = [{"domain": "test1", "vid": "3039"}] mock_comports_found_device = asyncio.Event() - def get_comports() -> list: - nonlocal mock_comports + def scan_serial_ports() -> list: + nonlocal mock_ports # Only "find" a device after a few invocations - if len(mock_comports.mock_calls) < 5: + if len(mock_ports.mock_calls) < 5: return [] mock_comports_found_device.set() return [ - MagicMock( + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -141,9 +150,7 @@ async def test_polling_discovery( timedelta(seconds=0.01), ), patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch( - "homeassistant.components.usb.comports", side_effect=get_comports - ) as mock_comports, + patch_scanned_serial_ports(side_effect=scan_serial_ports) as mock_ports, patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -163,16 +170,16 @@ async def test_polling_discovery( await hass.async_block_till_done() -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_removal_by_aiousbwatcher_before_started(hass: HomeAssistant) -> None: """Test a device is removed by the aiousbwatcher before started.""" new_usb = [{"domain": "test1", "vid": "3039", "pid": "3039"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -181,13 +188,13 @@ async def test_removal_by_aiousbwatcher_before_started(hass: HomeAssistant) -> N with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() - with patch("homeassistant.components.usb.comports", return_value=[]): + with patch_scanned_serial_ports(return_value=[]): hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() @@ -197,18 +204,18 @@ async def test_removal_by_aiousbwatcher_before_started(hass: HomeAssistant) -> N await hass.async_block_till_done() -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test a device is discovered from websocket scan.""" new_usb = [{"domain": "test1", "vid": "3039", "pid": "3039"}] - 
mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -217,7 +224,7 @@ async def test_discovered_by_websocket_scan( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -234,7 +241,7 @@ async def test_discovered_by_websocket_scan( assert mock_config_flow.mock_calls[0][1][0] == "test1" -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_limited_by_description_matcher( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -243,11 +250,11 @@ async def test_discovered_by_websocket_scan_limited_by_description_matcher( {"domain": "test1", "vid": "3039", "pid": "3039", "description": "*2652*"} ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -256,7 +263,7 @@ async def test_discovered_by_websocket_scan_limited_by_description_matcher( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -273,7 +280,7 @@ async def test_discovered_by_websocket_scan_limited_by_description_matcher( assert mock_config_flow.mock_calls[0][1][0] == "test1" -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_most_targeted_matcher_wins( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -283,11 +290,11 @@ async def test_most_targeted_matcher_wins( {"domain": "more", "vid": "3039", "pid": "3039", "description": "*2652*"}, ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -296,7 +303,7 @@ async def test_most_targeted_matcher_wins( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -313,7 +320,7 @@ async def test_most_targeted_matcher_wins( assert mock_config_flow.mock_calls[0][1][0] == "more" -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_rejected_by_description_matcher( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -322,11 +329,11 @@ 
async def test_discovered_by_websocket_scan_rejected_by_description_matcher( {"domain": "test1", "vid": "3039", "pid": "3039", "description": "*not_it*"} ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -335,7 +342,7 @@ async def test_discovered_by_websocket_scan_rejected_by_description_matcher( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -351,7 +358,7 @@ async def test_discovered_by_websocket_scan_rejected_by_description_matcher( assert len(mock_config_flow.mock_calls) == 0 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_limited_by_serial_number_matcher( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -365,11 +372,11 @@ async def test_discovered_by_websocket_scan_limited_by_serial_number_matcher( } ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -378,7 +385,7 @@ async def test_discovered_by_websocket_scan_limited_by_serial_number_matcher( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -395,7 +402,7 @@ async def test_discovered_by_websocket_scan_limited_by_serial_number_matcher( assert mock_config_flow.mock_calls[0][1][0] == "test1" -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_rejected_by_serial_number_matcher( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -404,11 +411,11 @@ async def test_discovered_by_websocket_scan_rejected_by_serial_number_matcher( {"domain": "test1", "vid": "3039", "pid": "3039", "serial_number": "123*"} ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -417,7 +424,7 @@ async def test_discovered_by_websocket_scan_rejected_by_serial_number_matcher( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -433,7 +440,7 @@ async def test_discovered_by_websocket_scan_rejected_by_serial_number_matcher( assert len(mock_config_flow.mock_calls) 
== 0 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_limited_by_manufacturer_matcher( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -447,11 +454,11 @@ async def test_discovered_by_websocket_scan_limited_by_manufacturer_matcher( } ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=conbee_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, @@ -460,7 +467,7 @@ async def test_discovered_by_websocket_scan_limited_by_manufacturer_matcher( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -477,7 +484,7 @@ async def test_discovered_by_websocket_scan_limited_by_manufacturer_matcher( assert mock_config_flow.mock_calls[0][1][0] == "test1" -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_rejected_by_manufacturer_matcher( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -491,11 +498,11 @@ async def test_discovered_by_websocket_scan_rejected_by_manufacturer_matcher( } ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=conbee_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, @@ -504,7 +511,7 @@ async def test_discovered_by_websocket_scan_rejected_by_manufacturer_matcher( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -520,7 +527,7 @@ async def test_discovered_by_websocket_scan_rejected_by_manufacturer_matcher( assert len(mock_config_flow.mock_calls) == 0 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_rejected_with_empty_serial_number_only( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -529,11 +536,11 @@ async def test_discovered_by_websocket_rejected_with_empty_serial_number_only( {"domain": "test1", "vid": "3039", "pid": "3039", "serial_number": "123*"} ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=conbee_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=None, manufacturer=None, description=None, @@ -542,7 +549,7 @@ async def test_discovered_by_websocket_rejected_with_empty_serial_number_only( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -558,18 
+565,18 @@ async def test_discovered_by_websocket_rejected_with_empty_serial_number_only( assert len(mock_config_flow.mock_calls) == 0 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_match_vid_only( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test a device is discovered from websocket scan only matching vid.""" new_usb = [{"domain": "test1", "vid": "3039"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -578,7 +585,7 @@ async def test_discovered_by_websocket_scan_match_vid_only( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -595,18 +602,18 @@ async def test_discovered_by_websocket_scan_match_vid_only( assert mock_config_flow.mock_calls[0][1][0] == "test1" -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_match_vid_wrong_pid( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test a device is discovered from websocket scan only matching vid but wrong pid.""" new_usb = [{"domain": "test1", "vid": "3039", "pid": "9999"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -615,7 +622,7 @@ async def test_discovered_by_websocket_scan_match_vid_wrong_pid( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -631,15 +638,15 @@ async def test_discovered_by_websocket_scan_match_vid_wrong_pid( assert len(mock_config_flow.mock_calls) == 0 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_no_vid_pid( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test a device is discovered from websocket scan with no vid or pid.""" new_usb = [{"domain": "test1", "vid": "3039", "pid": "9999"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, vid=None, pid=None, @@ -651,7 +658,7 @@ async def test_discovered_by_websocket_no_vid_pid( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -667,18 +674,18 @@ async def test_discovered_by_websocket_no_vid_pid( assert 
len(mock_config_flow.mock_calls) == 0 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_non_matching_discovered_by_scanner_after_started( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test a websocket scan that does not match.""" new_usb = [{"domain": "test1", "vid": "4444", "pid": "4444"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -687,7 +694,7 @@ async def test_non_matching_discovered_by_scanner_after_started( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -709,11 +716,11 @@ async def test_aiousbwatcher_on_wsl_fallback_without_throwing_exception( """Test that aiousbwatcher on WSL failure results in fallback to scanning without raising an exception.""" new_usb = [{"domain": "test1", "vid": "3039"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -722,7 +729,7 @@ async def test_aiousbwatcher_on_wsl_fallback_without_throwing_exception( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -743,17 +750,17 @@ async def test_discovered_by_aiousbwatcher_before_started(hass: HomeAssistant) - """Test a device is discovered since aiousbwatcher is now running.""" new_usb = [{"domain": "test1", "vid": "3039", "pid": "3039"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, ) ] - initial_mock_comports = [] + initial_ports = [] aiousbwatcher_callback = None def async_register_callback(callback): @@ -766,9 +773,7 @@ async def test_discovered_by_aiousbwatcher_before_started(hass: HomeAssistant) - with ( patch("sys.platform", "linux"), patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch( - "homeassistant.components.usb.comports", return_value=initial_mock_comports - ), + patch_scanned_serial_ports(return_value=initial_ports), patch( "homeassistant.components.usb.AIOUSBWatcher", return_value=MockAIOUSBWatcher ), @@ -782,7 +787,7 @@ async def test_discovered_by_aiousbwatcher_before_started(hass: HomeAssistant) - assert len(mock_config_flow.mock_calls) == 0 - initial_mock_comports.extend(mock_comports) + initial_ports.extend(mock_ports) aiousbwatcher_callback() await hass.async_block_till_done() @@ -874,18 +879,18 @@ def test_human_readable_device_name() -> None: assert "8A2A" in name 
-@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_async_is_plugged_in( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test async_is_plugged_in.""" new_usb = [{"domain": "test1", "vid": "3039", "pid": "3039"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -899,7 +904,7 @@ async def test_async_is_plugged_in( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=[]), + patch_scanned_serial_ports(return_value=[]), patch.object(hass.config_entries.flow, "async_init"), ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -909,7 +914,7 @@ async def test_async_is_plugged_in( assert not usb.async_is_plugged_in(hass, matcher) with ( - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init"), ): ws_client = await hass_ws_client(hass) @@ -920,7 +925,7 @@ async def test_async_is_plugged_in( assert usb.async_is_plugged_in(hass, matcher) -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") @pytest.mark.parametrize( "matcher", [ @@ -940,7 +945,7 @@ async def test_async_is_plugged_in_case_enforcement( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=[]), + patch_scanned_serial_ports(return_value=[]), patch.object(hass.config_entries.flow, "async_init"), ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -952,7 +957,7 @@ async def test_async_is_plugged_in_case_enforcement( usb.async_is_plugged_in(hass, matcher) -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_web_socket_triggers_discovery_request_callbacks( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -961,7 +966,7 @@ async def test_web_socket_triggers_discovery_request_callbacks( with ( patch("homeassistant.components.usb.async_get_usb", return_value=[]), - patch("homeassistant.components.usb.comports", return_value=[]), + patch_scanned_serial_ports(return_value=[]), patch.object(hass.config_entries.flow, "async_init"), ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -987,7 +992,7 @@ async def test_web_socket_triggers_discovery_request_callbacks( assert len(mock_callback.mock_calls) == 1 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_initial_scan_callback( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -997,7 +1002,7 @@ async def test_initial_scan_callback( with ( patch("homeassistant.components.usb.async_get_usb", return_value=[]), - patch("homeassistant.components.usb.comports", return_value=[]), + patch_scanned_serial_ports(return_value=[]), patch.object(hass.config_entries.flow, "async_init"), ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -1023,7 +1028,7 @@ async def test_initial_scan_callback( cancel_2() -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") 
+@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_cancel_initial_scan_callback( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -1032,7 +1037,7 @@ async def test_cancel_initial_scan_callback( with ( patch("homeassistant.components.usb.async_get_usb", return_value=[]), - patch("homeassistant.components.usb.comports", return_value=[]), + patch_scanned_serial_ports(return_value=[]), patch.object(hass.config_entries.flow, "async_init"), ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -1049,18 +1054,18 @@ async def test_cancel_initial_scan_callback( assert len(mock_callback.mock_calls) == 0 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_resolve_serial_by_id( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test the discovery data resolves to serial/by-id.""" new_usb = [{"domain": "test1", "vid": "3039", "pid": "3039"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -1069,7 +1074,7 @@ async def test_resolve_serial_by_id( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch( "homeassistant.components.usb.get_serial_by_id", return_value="/dev/serial/by-id/bla", @@ -1091,73 +1096,73 @@ async def test_resolve_serial_by_id( assert mock_config_flow.mock_calls[0][2]["data"].device == "/dev/serial/by-id/bla" -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") @pytest.mark.parametrize( "ports", [ [ - MagicMock( + USBDevice( device="/dev/cu.usbserial-2120", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, ), - MagicMock( + USBDevice( device="/dev/cu.usbserial-1120", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, ), - MagicMock( + USBDevice( device="/dev/cu.SLAB_USBtoUART", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, ), - MagicMock( + USBDevice( device="/dev/cu.SLAB_USBtoUART2", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, ), ], [ - MagicMock( + USBDevice( device="/dev/cu.SLAB_USBtoUART2", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, ), - MagicMock( + USBDevice( device="/dev/cu.SLAB_USBtoUART", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, ), - MagicMock( + USBDevice( device="/dev/cu.usbserial-1120", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, 
manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, ), - MagicMock( + USBDevice( device="/dev/cu.usbserial-2120", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, @@ -1177,7 +1182,7 @@ async def test_cp2102n_ordering_on_macos( with ( patch("sys.platform", "darwin"), patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=ports), + patch_scanned_serial_ports(return_value=ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -1224,34 +1229,31 @@ def test_deprecated_constants( ) -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") @patch("homeassistant.components.usb.REQUEST_SCAN_COOLDOWN", 0) async def test_register_port_event_callback( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test the registration of a port event callback.""" - port1 = Mock( + port1 = USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, ) - port2 = Mock( + port2 = USBDevice( device=conbee_device.device, - vid=12346, - pid=12346, + vid="303A", + pid="303A", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, ) - port1_usb = usb_device_from_port(port1) - port2_usb = usb_device_from_port(port2) - ws_client = await hass_ws_client(hass) mock_callback1 = Mock() @@ -1259,7 +1261,7 @@ async def test_register_port_event_callback( # Start off with no ports with ( - patch("homeassistant.components.usb.comports", return_value=[]), + patch_scanned_serial_ports(return_value=[]), ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -1270,13 +1272,13 @@ async def test_register_port_event_callback( assert mock_callback2.mock_calls == [] # Add two new ports - with patch("homeassistant.components.usb.comports", return_value=[port1, port2]): + with patch_scanned_serial_ports(return_value=[port1, port2]): await ws_client.send_json({"id": 1, "type": "usb/scan"}) response = await ws_client.receive_json() assert response["success"] - assert mock_callback1.mock_calls == [call({port1_usb, port2_usb}, set())] - assert mock_callback2.mock_calls == [call({port1_usb, port2_usb}, set())] + assert mock_callback1.mock_calls == [call({port1, port2}, set())] + assert mock_callback2.mock_calls == [call({port1, port2}, set())] # Cancel the second callback cancel2() @@ -1286,20 +1288,20 @@ async def test_register_port_event_callback( mock_callback2.reset_mock() # Remove port 2 - with patch("homeassistant.components.usb.comports", return_value=[port1]): + with patch_scanned_serial_ports(return_value=[port1]): await ws_client.send_json({"id": 2, "type": "usb/scan"}) response = await ws_client.receive_json() assert response["success"] await hass.async_block_till_done() - assert mock_callback1.mock_calls == [call(set(), {port2_usb})] + assert mock_callback1.mock_calls == [call(set(), {port2})] assert mock_callback2.mock_calls == [] # The second callback was unregistered mock_callback1.reset_mock() mock_callback2.reset_mock() # Keep port 2 removed - with patch("homeassistant.components.usb.comports", 
return_value=[port1]): + with patch_scanned_serial_ports(return_value=[port1]): await ws_client.send_json({"id": 3, "type": "usb/scan"}) response = await ws_client.receive_json() assert response["success"] @@ -1310,17 +1312,17 @@ async def test_register_port_event_callback( assert mock_callback2.mock_calls == [] # Unplug one and plug in the other - with patch("homeassistant.components.usb.comports", return_value=[port2]): + with patch_scanned_serial_ports(return_value=[port2]): await ws_client.send_json({"id": 4, "type": "usb/scan"}) response = await ws_client.receive_json() assert response["success"] await hass.async_block_till_done() - assert mock_callback1.mock_calls == [call({port2_usb}, {port1_usb})] + assert mock_callback1.mock_calls == [call({port2}, {port1})] assert mock_callback2.mock_calls == [] -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") @patch("homeassistant.components.usb.REQUEST_SCAN_COOLDOWN", 0) async def test_register_port_event_callback_failure( hass: HomeAssistant, @@ -1329,27 +1331,24 @@ async def test_register_port_event_callback_failure( ) -> None: """Test port event callback failure handling.""" - port1 = Mock( + port1 = USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, ) - port2 = Mock( + port2 = USBDevice( device=conbee_device.device, - vid=12346, - pid=12346, + vid="303A", + pid="303A", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, ) - port1_usb = usb_device_from_port(port1) - port2_usb = usb_device_from_port(port2) - ws_client = await hass_ws_client(hass) mock_callback1 = Mock(side_effect=RuntimeError("Failure 1")) @@ -1357,7 +1356,7 @@ async def test_register_port_event_callback_failure( # Start off with no ports with ( - patch("homeassistant.components.usb.comports", return_value=[]), + patch_scanned_serial_ports(return_value=[]), ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -1369,7 +1368,7 @@ async def test_register_port_event_callback_failure( # Add two new ports with ( - patch("homeassistant.components.usb.comports", return_value=[port1, port2]), + patch_scanned_serial_ports(return_value=[port1, port2]), caplog.at_level(logging.ERROR, logger="homeassistant.components.usb"), ): await ws_client.send_json({"id": 1, "type": "usb/scan"}) @@ -1378,8 +1377,8 @@ async def test_register_port_event_callback_failure( await hass.async_block_till_done() # Both were called even though they raised exceptions - assert mock_callback1.mock_calls == [call({port1_usb, port2_usb}, set())] - assert mock_callback2.mock_calls == [call({port1_usb, port2_usb}, set())] + assert mock_callback1.mock_calls == [call({port1, port2}, set())] + assert mock_callback2.mock_calls == [call({port1, port2}, set())] assert caplog.text.count("Error in USB port event callback") == 2 assert "Failure 1" in caplog.text diff --git a/tests/components/vodafone_station/test_button.py b/tests/components/vodafone_station/test_button.py index d5f377d3f6f..ade5eb78965 100644 --- a/tests/components/vodafone_station/test_button.py +++ b/tests/components/vodafone_station/test_button.py @@ -2,11 +2,20 @@ from unittest.mock import AsyncMock, patch +from aiovodafone.exceptions import ( + AlreadyLogged, + CannotAuthenticate, + CannotConnect, + GenericLoginError, +) +import pytest from 
syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.vodafone_station.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . import setup_integration @@ -46,3 +55,39 @@ async def test_pressing_button( blocking=True, ) mock_vodafone_station_router.restart_router.assert_called_once() + + +@pytest.mark.parametrize( + ("side_effect", "key", "error"), + [ + (CannotConnect, "cannot_execute_action", "CannotConnect()"), + (AlreadyLogged, "cannot_execute_action", "AlreadyLogged()"), + (GenericLoginError, "cannot_execute_action", "GenericLoginError()"), + (CannotAuthenticate, "cannot_authenticate", "CannotAuthenticate()"), + ], +) +async def test_button_fails( + hass: HomeAssistant, + mock_vodafone_station_router: AsyncMock, + mock_config_entry: MockConfigEntry, + side_effect: Exception, + key: str, + error: str, +) -> None: + """Test button action fails.""" + + await setup_integration(hass, mock_config_entry) + + mock_vodafone_station_router.restart_router.side_effect = side_effect + + with pytest.raises(HomeAssistantError) as exc_info: + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.vodafone_station_m123456789_restart"}, + blocking=True, + ) + + assert exc_info.value.translation_domain == DOMAIN + assert exc_info.value.translation_key == key + assert exc_info.value.translation_placeholders == {"error": error} diff --git a/tests/components/vodafone_station/test_config_flow.py b/tests/components/vodafone_station/test_config_flow.py index 68f8247bdf9..0648987eb27 100644 --- a/tests/components/vodafone_station/test_config_flow.py +++ b/tests/components/vodafone_station/test_config_flow.py @@ -228,3 +228,75 @@ async def test_options_flow( assert result["data"] == { CONF_CONSIDER_HOME: 37, } + + +async def test_reconfigure_successful( + hass: HomeAssistant, + mock_vodafone_station_router: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the host can be reconfigured.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + # original entry + assert mock_config_entry.data["host"] == "fake_host" + + reconfigure_result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": "192.168.100.60", + "password": "fake_password", + "username": "fake_username", + }, + ) + + assert reconfigure_result["type"] is FlowResultType.ABORT + assert reconfigure_result["reason"] == "reconfigure_successful" + + # changed entry + assert mock_config_entry.data["host"] == "192.168.100.60" + + +@pytest.mark.parametrize( + ("side_effect", "error"), + [ + (CannotConnect, "cannot_connect"), + (CannotAuthenticate, "invalid_auth"), + (AlreadyLogged, "already_logged"), + (ConnectionResetError, "unknown"), + ], +) +async def test_reconfigure_fails( + hass: HomeAssistant, + mock_vodafone_station_router: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + side_effect: Exception, + error: str, +) -> None: + """Test that the host can be reconfigured.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert 
result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + mock_vodafone_station_router.login.side_effect = side_effect + + reconfigure_result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": "192.168.100.60", + "password": "fake_password", + "username": "fake_username", + }, + ) + + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "reconfigure" + assert reconfigure_result["errors"] == {"base": error} diff --git a/tests/components/vodafone_station/test_coordinator.py b/tests/components/vodafone_station/test_coordinator.py index 1a9470245c7..5f75b538803 100644 --- a/tests/components/vodafone_station/test_coordinator.py +++ b/tests/components/vodafone_station/test_coordinator.py @@ -40,8 +40,7 @@ async def test_coordinator_device_cleanup( device_tracker = f"device_tracker.{DEVICE_1_HOST}" - state = hass.states.get(device_tracker) - assert state is not None + assert hass.states.get(device_tracker) mock_vodafone_station_router.get_devices_data.return_value = { DEVICE_2_MAC: VodafoneStationDevice( @@ -59,10 +58,10 @@ async def test_coordinator_device_cleanup( async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(device_tracker) - assert state is None + assert hass.states.get(device_tracker) is None assert f"Skipping entity {DEVICE_2_HOST}" in caplog.text - device = device_registry.async_get_device(identifiers={(DOMAIN, DEVICE_1_MAC)}) - assert device is None + assert ( + device_registry.async_get_device(identifiers={(DOMAIN, DEVICE_1_MAC)}) is None + ) assert f"Removing device: {DEVICE_1_HOST}" in caplog.text diff --git a/tests/components/vodafone_station/test_device_tracker.py b/tests/components/vodafone_station/test_device_tracker.py index e172fa76de5..a94f4ad05c4 100644 --- a/tests/components/vodafone_station/test_device_tracker.py +++ b/tests/components/vodafone_station/test_device_tracker.py @@ -47,8 +47,7 @@ async def test_consider_home( device_tracker = f"device_tracker.{DEVICE_1_HOST}" - state = hass.states.get(device_tracker) - assert state + assert (state := hass.states.get(device_tracker)) assert state.state == STATE_HOME mock_vodafone_station_router.get_devices_data.return_value[ @@ -59,6 +58,5 @@ async def test_consider_home( async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(device_tracker) - assert state + assert (state := hass.states.get(device_tracker)) assert state.state == STATE_NOT_HOME diff --git a/tests/components/vodafone_station/test_sensor.py b/tests/components/vodafone_station/test_sensor.py index ddf97824c75..5f27b67e3dd 100644 --- a/tests/components/vodafone_station/test_sensor.py +++ b/tests/components/vodafone_station/test_sensor.py @@ -55,8 +55,7 @@ async def test_active_connection_type( active_connection_entity = "sensor.vodafone_station_m123456789_active_connection" - state = hass.states.get(active_connection_entity) - assert state + assert (state := hass.states.get(active_connection_entity)) assert state.state == STATE_UNKNOWN mock_vodafone_station_router.get_sensor_data.return_value[connection_type] = ( @@ -67,8 +66,7 @@ async def test_active_connection_type( async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(active_connection_entity) - assert state + assert (state := hass.states.get(active_connection_entity)) assert state.state == LINE_TYPES[index] @@ -85,8 
+83,7 @@ async def test_uptime( uptime = "2024-11-19T20:19:00+00:00" uptime_entity = "sensor.vodafone_station_m123456789_uptime" - state = hass.states.get(uptime_entity) - assert state + assert (state := hass.states.get(uptime_entity)) assert state.state == uptime mock_vodafone_station_router.get_sensor_data.return_value["sys_uptime"] = "12:17:23" @@ -95,8 +92,7 @@ async def test_uptime( async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(uptime_entity) - assert state + assert (state := hass.states.get(uptime_entity)) assert state.state == uptime @@ -124,6 +120,5 @@ async def test_coordinator_client_connector_error( async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get("sensor.vodafone_station_m123456789_uptime") - assert state + assert (state := hass.states.get("sensor.vodafone_station_m123456789_uptime")) assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/watergate/snapshots/test_event.ambr b/tests/components/watergate/snapshots/test_event.ambr new file mode 100644 index 00000000000..97f453697ca --- /dev/null +++ b/tests/components/watergate/snapshots/test_event.ambr @@ -0,0 +1,111 @@ +# serializer version: 1 +# name: test_event[event.sonic_duration_auto_shut_off-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'duration_threshold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.sonic_duration_auto_shut_off', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Duration auto shut-off', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'auto_shut_off_duration', + 'unique_id': 'a63182948ce2896a.auto_shut_off_duration', + 'unit_of_measurement': None, + }) +# --- +# name: test_event[event.sonic_duration_auto_shut_off-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'event_type': None, + 'event_types': list([ + 'duration_threshold', + ]), + 'friendly_name': 'Sonic Duration auto shut-off', + }), + 'context': , + 'entity_id': 'event.sonic_duration_auto_shut_off', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_event[event.sonic_volume_auto_shut_off-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'volume_threshold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.sonic_volume_auto_shut_off', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume auto shut-off', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'auto_shut_off_volume', + 'unique_id': 'a63182948ce2896a.auto_shut_off_volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_event[event.sonic_volume_auto_shut_off-state] 
+ StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'event_type': None, + 'event_types': list([ + 'volume_threshold', + ]), + 'friendly_name': 'Sonic Volume auto shut-off', + }), + 'context': , + 'entity_id': 'event.sonic_volume_auto_shut_off', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/watergate/test_event.py b/tests/components/watergate/test_event.py new file mode 100644 index 00000000000..6997c3f1fdf --- /dev/null +++ b/tests/components/watergate/test_event.py @@ -0,0 +1,84 @@ +"""Tests for the Watergate event entity platform.""" + +from collections.abc import Generator + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.typing import StateType + +from . import init_integration +from .const import MOCK_WEBHOOK_ID + +from tests.common import AsyncMock, MockConfigEntry, patch, snapshot_platform +from tests.typing import ClientSessionGenerator + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_event( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, +) -> None: + """Test states of the sensor.""" + freezer.move_to("2021-01-09 12:00:00+00:00") + with patch("homeassistant.components.watergate.PLATFORMS", [Platform.EVENT]): + await init_integration(hass, mock_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id", "event_type"), + [ + ("sonic_volume_auto_shut_off", "volume_threshold"), + ("sonic_duration_auto_shut_off", "duration_threshold"), + ], +) +async def test_auto_shut_off_webhook( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], + entity_id: str, + event_type: str, +) -> None: + """Test if water flow webhook is handled correctly.""" + await init_integration(hass, mock_entry) + + def assert_state(entity_id: str, expected_state: str): + state = hass.states.get(f"event.{entity_id}") + assert state.state == str(expected_state) + + assert_state(entity_id, "unknown") + + telemetry_change_data = { + "type": "auto-shut-off-report", + "data": { + "type": event_type, + "volume": 1500, + "duration": 30, + "timestamp": 1730148016, + }, + } + client = await hass_client_no_auth() + await client.post(f"/api/webhook/{MOCK_WEBHOOK_ID}", json=telemetry_change_data) + + await hass.async_block_till_done() + + def assert_extra_state( + entity_id: str, attribute: str, expected_attribute: StateType + ): + attributes = hass.states.get(f"event.{entity_id}").attributes + assert attributes.get(attribute) == expected_attribute + + assert_extra_state(entity_id, "event_type", event_type) + assert_extra_state(entity_id, "volume", 1500) + assert_extra_state(entity_id, "duration", 30) diff --git a/tests/components/watergate/test_sensor.py b/tests/components/watergate/test_sensor.py index 78e375857ed..0bf883a1955 100644 --- a/tests/components/watergate/test_sensor.py +++ b/tests/components/watergate/test_sensor.py @@ -1,4 +1,4 @@ -"""Tests for the Watergate valve platform.""" +"""Tests for the Watergate sensor platform.""" from collections.abc 
import Generator diff --git a/tests/components/websocket_api/test_http.py b/tests/components/websocket_api/test_http.py index 03e30c11ee9..370aab1067a 100644 --- a/tests/components/websocket_api/test_http.py +++ b/tests/components/websocket_api/test_http.py @@ -241,7 +241,7 @@ async def test_pending_msg_peak( instance: http.WebSocketHandler = cast(http.WebSocketHandler, setup_instance) # Fill the queue past the allowed peak - for _ in range(10): + for _ in range(20): instance._send_message({"overload": "message"}) async_fire_time_changed( @@ -251,7 +251,7 @@ async def test_pending_msg_peak( msg = await websocket_client.receive() assert msg.type is WSMsgType.CLOSE assert "Client unable to keep up with pending messages" in caplog.text - assert "Stayed over 5 for 5 seconds" in caplog.text + assert "Stayed over 5 for 10 seconds" in caplog.text assert "overload" in caplog.text diff --git a/tests/components/whois/snapshots/test_config_flow.ambr b/tests/components/whois/snapshots/test_config_flow.ambr index 0d99b0596e3..97d6fde6376 100644 --- a/tests/components/whois/snapshots/test_config_flow.ambr +++ b/tests/components/whois/snapshots/test_config_flow.ambr @@ -175,6 +175,94 @@ 'version': 1, }) # --- +# name: test_full_flow_with_error[WhoisPrivateRegistry-private_registry] + FlowResultSnapshot({ + 'context': dict({ + 'source': 'user', + 'unique_id': 'example.com', + }), + 'data': dict({ + 'domain': 'example.com', + }), + 'description': None, + 'description_placeholders': None, + 'flow_id': , + 'handler': 'whois', + 'minor_version': 1, + 'options': dict({ + }), + 'result': ConfigEntrySnapshot({ + 'data': dict({ + 'domain': 'example.com', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'whois', + 'entry_id': , + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Example.com', + 'unique_id': 'example.com', + 'version': 1, + }), + 'subentries': tuple( + ), + 'title': 'Example.com', + 'type': , + 'version': 1, + }) +# --- +# name: test_full_flow_with_error[WhoisQuotaExceeded-quota_exceeded] + FlowResultSnapshot({ + 'context': dict({ + 'source': 'user', + 'unique_id': 'example.com', + }), + 'data': dict({ + 'domain': 'example.com', + }), + 'description': None, + 'description_placeholders': None, + 'flow_id': , + 'handler': 'whois', + 'minor_version': 1, + 'options': dict({ + }), + 'result': ConfigEntrySnapshot({ + 'data': dict({ + 'domain': 'example.com', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'whois', + 'entry_id': , + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Example.com', + 'unique_id': 'example.com', + 'version': 1, + }), + 'subentries': tuple( + ), + 'title': 'Example.com', + 'type': , + 'version': 1, + }) +# --- # name: test_full_user_flow FlowResultSnapshot({ 'context': dict({ diff --git a/tests/components/whois/test_config_flow.py b/tests/components/whois/test_config_flow.py index 35e40c4e809..6ab02887be2 100644 --- a/tests/components/whois/test_config_flow.py +++ b/tests/components/whois/test_config_flow.py @@ -9,6 +9,8 @@ from whois.exceptions import ( UnknownDateFormat, UnknownTld, WhoisCommandFailed, + WhoisPrivateRegistry, + WhoisQuotaExceeded, ) from homeassistant.components.whois.const import DOMAIN @@ -52,6 +54,8 @@ async def test_full_user_flow( 
(FailedParsingWhoisOutput, "unexpected_response"), (UnknownDateFormat, "unknown_date_format"), (WhoisCommandFailed, "whois_command_failed"), + (WhoisPrivateRegistry, "private_registry"), + (WhoisQuotaExceeded, "quota_exceeded"), ], ) async def test_full_flow_with_error( diff --git a/tests/components/wled/snapshots/test_select.ambr b/tests/components/wled/snapshots/test_select.ambr index ca3b0a5dc6e..d3f8fbcc21d 100644 --- a/tests/components/wled/snapshots/test_select.ambr +++ b/tests/components/wled/snapshots/test_select.ambr @@ -99,77 +99,77 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'WLED RGB Light Segment 1 color palette', 'options': list([ - 'Default', - '* Random Cycle', '* Color 1', - '* Colors 1&2', '* Color Gradient', + '* Colors 1&2', '* Colors Only', - 'Party', - 'Cloud', - 'Lava', - 'Ocean', - 'Forest', - 'Rainbow', - 'Rainbow Bands', - 'Sunset', - 'Rivendell', - 'Breeze', - 'Red & Blue', - 'Yellowout', + '* Random Cycle', 'Analogous', - 'Splash', - 'Pastel', - 'Sunset 2', - 'Beach', - 'Vintage', - 'Departure', - 'Landscape', - 'Beech', - 'Sherbet', - 'Hult', - 'Hult 64', - 'Drywet', - 'Jul', - 'Grintage', - 'Rewhi', - 'Tertiary', - 'Fire', - 'Icefire', - 'Cyane', - 'Light Pink', - 'Autumn', - 'Magenta', - 'Magred', - 'Yelmag', - 'Yelblu', - 'Orange & Teal', - 'Tiamat', 'April Night', - 'Orangery', - 'C9', - 'Sakura', - 'Aurora', + 'Aqua Flash', 'Atlantica', + 'Aurora', + 'Aurora 2', + 'Autumn', + 'Beach', + 'Beech', + 'Blink Red', + 'Breeze', + 'C9', 'C9 2', 'C9 New', - 'Temperature', - 'Aurora 2', - 'Retro Clown', 'Candy', - 'Toxy Reaf', + 'Candy2', + 'Cloud', + 'Cyane', + 'Default', + 'Departure', + 'Drywet', 'Fairy Reaf', - 'Semi Blue', - 'Pink Candy', - 'Red Reaf', - 'Aqua Flash', - 'Yelblu Hot', + 'Fire', + 'Forest', + 'Grintage', + 'Hult', + 'Hult 64', + 'Icefire', + 'Jul', + 'Landscape', + 'Lava', + 'Light Pink', 'Lite Light', + 'Magenta', + 'Magred', + 'Ocean', + 'Orange & Teal', + 'Orangery', + 'Party', + 'Pastel', + 'Pink Candy', + 'Rainbow', + 'Rainbow Bands', + 'Red & Blue', 'Red Flash', - 'Blink Red', + 'Red Reaf', 'Red Shift', 'Red Tide', - 'Candy2', + 'Retro Clown', + 'Rewhi', + 'Rivendell', + 'Sakura', + 'Semi Blue', + 'Sherbet', + 'Splash', + 'Sunset', + 'Sunset 2', + 'Temperature', + 'Tertiary', + 'Tiamat', + 'Toxy Reaf', + 'Vintage', + 'Yelblu', + 'Yelblu Hot', + 'Yellowout', + 'Yelmag', ]), }), 'context': , @@ -187,77 +187,77 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'Default', - '* Random Cycle', '* Color 1', - '* Colors 1&2', '* Color Gradient', + '* Colors 1&2', '* Colors Only', - 'Party', - 'Cloud', - 'Lava', - 'Ocean', - 'Forest', - 'Rainbow', - 'Rainbow Bands', - 'Sunset', - 'Rivendell', - 'Breeze', - 'Red & Blue', - 'Yellowout', + '* Random Cycle', 'Analogous', - 'Splash', - 'Pastel', - 'Sunset 2', - 'Beach', - 'Vintage', - 'Departure', - 'Landscape', - 'Beech', - 'Sherbet', - 'Hult', - 'Hult 64', - 'Drywet', - 'Jul', - 'Grintage', - 'Rewhi', - 'Tertiary', - 'Fire', - 'Icefire', - 'Cyane', - 'Light Pink', - 'Autumn', - 'Magenta', - 'Magred', - 'Yelmag', - 'Yelblu', - 'Orange & Teal', - 'Tiamat', 'April Night', - 'Orangery', - 'C9', - 'Sakura', - 'Aurora', + 'Aqua Flash', 'Atlantica', + 'Aurora', + 'Aurora 2', + 'Autumn', + 'Beach', + 'Beech', + 'Blink Red', + 'Breeze', + 'C9', 'C9 2', 'C9 New', - 'Temperature', - 'Aurora 2', - 'Retro Clown', 'Candy', - 'Toxy Reaf', + 'Candy2', + 'Cloud', + 'Cyane', + 'Default', + 'Departure', + 'Drywet', 'Fairy Reaf', - 'Semi Blue', - 'Pink Candy', - 'Red Reaf', - 'Aqua Flash', - 'Yelblu 
Hot', + 'Fire', + 'Forest', + 'Grintage', + 'Hult', + 'Hult 64', + 'Icefire', + 'Jul', + 'Landscape', + 'Lava', + 'Light Pink', 'Lite Light', + 'Magenta', + 'Magred', + 'Ocean', + 'Orange & Teal', + 'Orangery', + 'Party', + 'Pastel', + 'Pink Candy', + 'Rainbow', + 'Rainbow Bands', + 'Red & Blue', 'Red Flash', - 'Blink Red', + 'Red Reaf', 'Red Shift', 'Red Tide', - 'Candy2', + 'Retro Clown', + 'Rewhi', + 'Rivendell', + 'Sakura', + 'Semi Blue', + 'Sherbet', + 'Splash', + 'Sunset', + 'Sunset 2', + 'Temperature', + 'Tertiary', + 'Tiamat', + 'Toxy Reaf', + 'Vintage', + 'Yelblu', + 'Yelblu Hot', + 'Yellowout', + 'Yelmag', ]), }), 'config_entry_id': , diff --git a/tests/components/wyoming/conftest.py b/tests/components/wyoming/conftest.py index 018fff33821..125edc547c6 100644 --- a/tests/components/wyoming/conftest.py +++ b/tests/components/wyoming/conftest.py @@ -121,7 +121,9 @@ def handle_config_entry(hass: HomeAssistant) -> ConfigEntry: @pytest.fixture -async def init_wyoming_stt(hass: HomeAssistant, stt_config_entry: ConfigEntry): +async def init_wyoming_stt( + hass: HomeAssistant, stt_config_entry: ConfigEntry +) -> ConfigEntry: """Initialize Wyoming STT.""" with patch( "homeassistant.components.wyoming.data.load_wyoming_info", @@ -129,9 +131,13 @@ async def init_wyoming_stt(hass: HomeAssistant, stt_config_entry: ConfigEntry): ): await hass.config_entries.async_setup(stt_config_entry.entry_id) + return stt_config_entry + @pytest.fixture -async def init_wyoming_tts(hass: HomeAssistant, tts_config_entry: ConfigEntry): +async def init_wyoming_tts( + hass: HomeAssistant, tts_config_entry: ConfigEntry +) -> ConfigEntry: """Initialize Wyoming TTS.""" with patch( "homeassistant.components.wyoming.data.load_wyoming_info", @@ -139,11 +145,13 @@ async def init_wyoming_tts(hass: HomeAssistant, tts_config_entry: ConfigEntry): ): await hass.config_entries.async_setup(tts_config_entry.entry_id) + return tts_config_entry + @pytest.fixture async def init_wyoming_wake_word( hass: HomeAssistant, wake_word_config_entry: ConfigEntry -): +) -> ConfigEntry: """Initialize Wyoming Wake Word.""" with patch( "homeassistant.components.wyoming.data.load_wyoming_info", @@ -151,6 +159,8 @@ async def init_wyoming_wake_word( ): await hass.config_entries.async_setup(wake_word_config_entry.entry_id) + return wake_word_config_entry + @pytest.fixture async def init_wyoming_intent( diff --git a/tests/components/wyoming/test_websocket.py b/tests/components/wyoming/test_websocket.py new file mode 100644 index 00000000000..18b43321354 --- /dev/null +++ b/tests/components/wyoming/test_websocket.py @@ -0,0 +1,58 @@ +"""Websocket tests for Wyoming integration.""" + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from tests.typing import WebSocketGenerator + + +async def test_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + init_components, + init_wyoming_stt: ConfigEntry, + init_wyoming_tts: ConfigEntry, + init_wyoming_wake_word: ConfigEntry, + init_wyoming_intent: ConfigEntry, + init_wyoming_handle: ConfigEntry, +) -> None: + """Test info websocket command.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "wyoming/info"}) + + # result + msg = await client.receive_json() + assert msg["success"] + + info = msg.get("result", {}).get("info", {}) + + # stt (speech-to-text) = asr (automated speech recognition) + assert init_wyoming_stt.entry_id in info + asr_info = info[init_wyoming_stt.entry_id].get("asr", []) + assert 
len(asr_info) == 1 + assert asr_info[0].get("name") == "Test ASR" + + # tts (text-to-speech) + assert init_wyoming_tts.entry_id in info + tts_info = info[init_wyoming_tts.entry_id].get("tts", []) + assert len(tts_info) == 1 + assert tts_info[0].get("name") == "Test TTS" + + # wake word detection + assert init_wyoming_wake_word.entry_id in info + wake_info = info[init_wyoming_wake_word.entry_id].get("wake", []) + assert len(wake_info) == 1 + assert wake_info[0].get("name") == "Test Wake Word" + + # intent recognition + assert init_wyoming_intent.entry_id in info + intent_info = info[init_wyoming_intent.entry_id].get("intent", []) + assert len(intent_info) == 1 + assert intent_info[0].get("name") == "Test Intent" + + # intent handling + assert init_wyoming_handle.entry_id in info + handle_info = info[init_wyoming_handle.entry_id].get("handle", []) + assert len(handle_info) == 1 + assert handle_info[0].get("name") == "Test Handle" diff --git a/tests/components/youtube/test_config_flow.py b/tests/components/youtube/test_config_flow.py index 73652d9b239..2cfb970928d 100644 --- a/tests/components/youtube/test_config_flow.py +++ b/tests/components/youtube/test_config_flow.py @@ -131,7 +131,51 @@ async def test_flow_abort_without_subscriptions( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, ) -> None: - """Check abort flow if user has no subscriptions.""" + """Check abort flow if user has no subscriptions and no own channel.""" + result = await hass.config_entries.flow.async_init( + "youtube", context={"source": config_entries.SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + assert result["url"] == ( + f"{GOOGLE_AUTH_URI}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}&scope={'+'.join(SCOPES)}" + "&access_type=offline&prompt=consent" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + service = MockYouTube( + channel_fixture="youtube/get_no_channel.json", + subscriptions_fixture="youtube/get_no_subscriptions.json", + ) + with ( + patch("homeassistant.components.youtube.async_setup_entry", return_value=True), + patch( + "homeassistant.components.youtube.config_flow.YouTube", return_value=service + ), + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_channel" + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_flow_without_subscriptions( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, +) -> None: + """Check flow continues even without subscriptions since user has their own channel.""" result = await hass.config_entries.flow.async_init( "youtube", context={"source": config_entries.SOURCE_USER} ) @@ -163,8 +207,30 @@ async def test_flow_abort_without_subscriptions( ), ): result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "no_subscriptions" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "channels" + + # Verify the form schema contains only the user's own channel + schema = result["data_schema"] + channels = 
schema.schema[CONF_CHANNELS].config["options"] + assert len(channels) == 1 + assert channels[0]["value"] == "UC_x5XG1OV2P6uZZ5FSM9Ttw" + assert "(Your Channel)" in channels[0]["label"] + + # Test selecting the own channel + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_CHANNELS: ["UC_x5XG1OV2P6uZZ5FSM9Ttw"]}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TITLE + assert "result" in result + assert result["result"].unique_id == "UC_x5XG1OV2P6uZZ5FSM9Ttw" + assert "token" in result["result"].data + assert result["result"].data["token"]["access_token"] == "mock-access-token" + assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" + assert result["options"] == {CONF_CHANNELS: ["UC_x5XG1OV2P6uZZ5FSM9Ttw"]} @pytest.mark.usefixtures("current_request_with_host") @@ -373,3 +439,112 @@ async def test_options_flow( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == {CONF_CHANNELS: ["UC_x5XG1OV2P6uZZ5FSM9Ttw"]} + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_own_channel_included( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, +) -> None: + """Test that the user's own channel is included in the list of selectable channels.""" + result = await hass.config_entries.flow.async_init( + "youtube", context={"source": config_entries.SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + assert result["url"] == ( + f"{GOOGLE_AUTH_URI}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}&scope={'+'.join(SCOPES)}" + "&access_type=offline&prompt=consent" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + with ( + patch( + "homeassistant.components.youtube.async_setup_entry", return_value=True + ) as mock_setup, + patch( + "homeassistant.components.youtube.config_flow.YouTube", + return_value=MockYouTube(), + ), + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "channels" + + # Verify the form schema contains the user's own channel + schema = result["data_schema"] + channels = schema.schema[CONF_CHANNELS].config["options"] + assert any( + channel["value"] == "UC_x5XG1OV2P6uZZ5FSM9Ttw" + and "(Your Channel)" in channel["label"] + for channel in channels + ) + + # Test selecting both own channel and a subscribed channel + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_CHANNELS: ["UC_x5XG1OV2P6uZZ5FSM9Ttw", "UC_x5XG1OV2P6uZZ5FSM9Ttw"] + }, + ) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup.mock_calls) == 1 + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TITLE + assert "result" in result + assert result["result"].unique_id == "UC_x5XG1OV2P6uZZ5FSM9Ttw" + assert "token" in result["result"].data + assert result["result"].data["token"]["access_token"] == "mock-access-token" + assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" + assert result["options"] == { + CONF_CHANNELS: ["UC_x5XG1OV2P6uZZ5FSM9Ttw", 
"UC_x5XG1OV2P6uZZ5FSM9Ttw"] + } + + +async def test_options_flow_own_channel( + hass: HomeAssistant, setup_integration: ComponentSetup +) -> None: + """Test the options flow includes the user's own channel.""" + await setup_integration() + with patch( + "homeassistant.components.youtube.config_flow.YouTube", + return_value=MockYouTube(), + ): + entry = hass.config_entries.async_entries(DOMAIN)[0] + result = await hass.config_entries.options.async_init(entry.entry_id) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + # Verify the form schema contains the user's own channel + schema = result["data_schema"] + channels = schema.schema[CONF_CHANNELS].config["options"] + assert any( + channel["value"] == "UC_x5XG1OV2P6uZZ5FSM9Ttw" + and "(Your Channel)" in channel["label"] + for channel in channels + ) + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_CHANNELS: ["UC_x5XG1OV2P6uZZ5FSM9Ttw"]}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == {CONF_CHANNELS: ["UC_x5XG1OV2P6uZZ5FSM9Ttw"]} diff --git a/tests/components/zha/test_cover.py b/tests/components/zha/test_cover.py index e5d588aa1bf..4bc4d6c97cf 100644 --- a/tests/components/zha/test_cover.py +++ b/tests/components/zha/test_cover.py @@ -81,7 +81,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: cluster = zigpy_device.endpoints[1].window_covering cluster.PLUGGED_ATTR_READS = { WCAttrs.current_position_lift_percentage.name: 0, - WCAttrs.current_position_tilt_percentage.name: 42, + WCAttrs.current_position_tilt_percentage.name: 100, WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), } @@ -115,33 +115,33 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert state assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 - assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 58 + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 - # test that the state has changed from unavailable to off + # test that the state has changed from open to closed await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 100} ) assert hass.states.get(entity_id).state == CoverState.CLOSED - # test to see if it opens + # test that it opens await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 0} ) assert hass.states.get(entity_id).state == CoverState.OPEN - # test that the state remains after tilting to 100% - await send_attributes_report( - hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 100} - ) - assert hass.states.get(entity_id).state == CoverState.OPEN - - # test to see the state remains after tilting to 0% + # test that the state remains after tilting to 0% (open) await send_attributes_report( hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 0} ) assert hass.states.get(entity_id).state == CoverState.OPEN - # close from UI + # test that the state remains after tilting to 100% (closed) + await send_attributes_report( + hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 100} + ) + assert hass.states.get(entity_id).state == CoverState.OPEN + + # close lift from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): await 
hass.services.async_call( COVER_DOMAIN, SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True @@ -160,6 +160,11 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.CLOSED + # close tilt from UI, needs re-opening first + await send_attributes_report( + hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 0} + ) + assert hass.states.get(entity_id).state == CoverState.OPEN with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, @@ -185,7 +190,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.CLOSED - # open from UI + # open lift from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True @@ -204,6 +209,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.OPEN + # open tilt from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, @@ -229,7 +235,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.OPEN - # set position UI + # set lift position from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, @@ -261,6 +267,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.OPEN + # set tilt position from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, @@ -281,13 +288,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( - hass, cluster, {WCAttrs.current_position_lift_percentage.id: 35} + hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 35} ) assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( - hass, cluster, {WCAttrs.current_position_lift_percentage.id: 53} + hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 53} ) assert hass.states.get(entity_id).state == CoverState.OPEN @@ -338,7 +345,7 @@ async def test_cover_failures( # load up cover domain cluster = zigpy_device.endpoints[1].window_covering cluster.PLUGGED_ATTR_READS = { - WCAttrs.current_position_tilt_percentage.name: 42, + WCAttrs.current_position_tilt_percentage.name: 100, WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, } update_attribute_cache(cluster) @@ -355,7 +362,7 @@ async def test_cover_failures( await send_attributes_report(hass, cluster, {0: 0, 8: 100, 1: 1}) assert hass.states.get(entity_id).state == CoverState.CLOSED - # test to see if it opens + # test that it opens await send_attributes_report(hass, cluster, {0: 1, 8: 0, 1: 100}) assert hass.states.get(entity_id).state == CoverState.OPEN diff --git a/tests/components/zwave_js/test_events.py b/tests/components/zwave_js/test_events.py index 0bb6376a02b..8cdaef3e63d 100644 --- a/tests/components/zwave_js/test_events.py +++ b/tests/components/zwave_js/test_events.py @@ -6,11 +6,18 @@ import 
pytest from zwave_js_server.const import CommandClass from zwave_js_server.event import Event +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from tests.common import async_capture_events +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [] + + async def test_scenes( hass: HomeAssistant, hank_binary_switch, integration, client ) -> None: @@ -244,6 +251,7 @@ async def test_notifications( assert events[2].data["command_class_name"] == "Multilevel Switch" +@pytest.mark.parametrize("platforms", [[Platform.SWITCH]]) async def test_value_updated( hass: HomeAssistant, vision_security_zl7432, integration, client ) -> None: diff --git a/tests/components/zwave_js/test_fan.py b/tests/components/zwave_js/test_fan.py index 2551fc7b34a..25ab6a87200 100644 --- a/tests/components/zwave_js/test_fan.py +++ b/tests/components/zwave_js/test_fan.py @@ -29,12 +29,19 @@ from homeassistant.const import ( STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.FAN] + + async def test_generic_fan( hass: HomeAssistant, client, fan_generic, integration ) -> None: diff --git a/tests/components/zwave_js/test_humidifier.py b/tests/components/zwave_js/test_humidifier.py index 261e09babee..78ea7899287 100644 --- a/tests/components/zwave_js/test_humidifier.py +++ b/tests/components/zwave_js/test_humidifier.py @@ -1,5 +1,6 @@ """Test the Z-Wave JS humidifier platform.""" +import pytest from zwave_js_server.const import CommandClass from zwave_js_server.const.command_class.humidity_control import HumidityControlMode from zwave_js_server.event import Event @@ -22,12 +23,19 @@ from homeassistant.const import ( STATE_OFF, STATE_ON, STATE_UNKNOWN, + Platform, ) from homeassistant.core import HomeAssistant from .common import DEHUMIDIFIER_ADC_T3000_ENTITY, HUMIDIFIER_ADC_T3000_ENTITY +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.HUMIDIFIER] + + async def test_humidifier( hass: HomeAssistant, client, climate_adc_t3000, integration ) -> None: diff --git a/tests/helpers/test_area_registry.py b/tests/helpers/test_area_registry.py index c69f039027e..3496c41ecf4 100644 --- a/tests/helpers/test_area_registry.py +++ b/tests/helpers/test_area_registry.py @@ -494,6 +494,29 @@ async def test_async_get_area_by_name(area_registry: ar.AreaRegistry) -> None: assert area_registry.async_get_area_by_name("M o c k 1").normalized_name == "mock1" +async def test_async_get_areas_by_alias( + area_registry: ar.AreaRegistry, +) -> None: + """Make sure we can get the areas by alias.""" + area1 = area_registry.async_create("Mock1", aliases=("alias_1", "alias_2")) + area2 = area_registry.async_create("Mock2", aliases=("alias_1", "alias_3")) + + assert len(area_registry.areas) == 2 + + alias1_list = area_registry.async_get_areas_by_alias("A l i a s_1") + alias2_list = area_registry.async_get_areas_by_alias("A l i a s_2") + alias3_list = area_registry.async_get_areas_by_alias("A l i a s_3") + + assert len(alias1_list) == 2 + assert len(alias2_list) == 1 + assert len(alias3_list) == 1 + + assert area1 in alias1_list + assert area1 in alias2_list + assert area2 in alias1_list + assert area2 in alias3_list + + async def 
test_async_get_area_by_name_not_found(area_registry: ar.AreaRegistry) -> None: """Make sure we return None for non-existent areas.""" area_registry.async_create("Mock1") diff --git a/tests/helpers/test_floor_registry.py b/tests/helpers/test_floor_registry.py index 6a672399522..5ebd63ae302 100644 --- a/tests/helpers/test_floor_registry.py +++ b/tests/helpers/test_floor_registry.py @@ -327,7 +327,7 @@ async def test_loading_floors_from_storage( assert len(registry.floors) == 1 -async def test_getting_floor(floor_registry: fr.FloorRegistry) -> None: +async def test_getting_floor_by_name(floor_registry: fr.FloorRegistry) -> None: """Make sure we can get the floors by name.""" floor = floor_registry.async_create("First floor") floor2 = floor_registry.async_get_floor_by_name("first floor") @@ -341,6 +341,27 @@ async def test_getting_floor(floor_registry: fr.FloorRegistry) -> None: assert get_floor == floor +async def test_async_get_floors_by_alias( + floor_registry: fr.FloorRegistry, +) -> None: + """Make sure we can get the floors by alias.""" + floor1 = floor_registry.async_create("First floor", aliases=("alias_1", "alias_2")) + floor2 = floor_registry.async_create("Second floor", aliases=("alias_1", "alias_3")) + + alias1_list = floor_registry.async_get_floors_by_alias("A l i a s_1") + alias2_list = floor_registry.async_get_floors_by_alias("A l i a s_2") + alias3_list = floor_registry.async_get_floors_by_alias("A l i a s_3") + + assert len(alias1_list) == 2 + assert len(alias2_list) == 1 + assert len(alias3_list) == 1 + + assert floor1 in alias1_list + assert floor1 in alias2_list + assert floor2 in alias1_list + assert floor2 in alias3_list + + async def test_async_get_floor_by_name_not_found( floor_registry: fr.FloorRegistry, ) -> None: diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index f8552fcefed..4c707590528 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -5853,14 +5853,16 @@ async def test_stop_action_subscript( ) +@pytest.mark.parametrize(("var", "response"), [(1, "If: Then"), (2, "Testing 123")]) @pytest.mark.parametrize( - ("var", "response"), - [(1, "If: Then"), (2, "Testing 123")], + ("script_mode", "max_runs"), [("single", 1), ("parallel", 2), ("queued", 2)] ) async def test_stop_action_response_variables( hass: HomeAssistant, var: int, response: str, + script_mode, + max_runs, ) -> None: """Test setting stop response_variable in a subscript.""" sequence = cv.SCRIPT_SCHEMA( @@ -5879,7 +5881,14 @@ async def test_stop_action_response_variables( {"stop": "In the name of love", "response_variable": "output"}, ] ) - script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + script_obj = script.Script( + hass, + sequence, + "Test Name", + "test_domain", + script_mode=script_mode, + max_runs=max_runs, + ) run_vars = MappingProxyType({"var": var}) result = await script_obj.async_run(run_vars, context=Context()) diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index e4e73fc52d9..89d1c307fd7 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -6790,6 +6790,184 @@ def test_flatten(hass: HomeAssistant) -> None: template.Template("{{ flatten() }}", hass).async_render() +def test_intersect(hass: HomeAssistant) -> None: + """Test the intersect function and filter.""" + assert list( + template.Template( + "{{ intersect([1, 2, 5, 3, 4, 10], [1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == unordered([1, 2, 3, 4, 5]) + + assert list( + template.Template( + 
"{{ [1, 2, 5, 3, 4, 10] | intersect([1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == unordered([1, 2, 3, 4, 5]) + + assert list( + template.Template( + "{{ intersect(['a', 'b', 'c'], ['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["b", "c"]) + + assert list( + template.Template( + "{{ ['a', 'b', 'c'] | intersect(['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["b", "c"]) + + assert ( + template.Template("{{ intersect([], [1, 2, 3]) }}", hass).async_render() == [] + ) + + assert ( + template.Template("{{ [] | intersect([1, 2, 3]) }}", hass).async_render() == [] + ) + + with pytest.raises(TemplateError, match="intersect expected a list, got str"): + template.Template("{{ 'string' | intersect([1, 2, 3]) }}", hass).async_render() + + with pytest.raises(TemplateError, match="intersect expected a list, got str"): + template.Template("{{ [1, 2, 3] | intersect('string') }}", hass).async_render() + + +def test_difference(hass: HomeAssistant) -> None: + """Test the difference function and filter.""" + assert list( + template.Template( + "{{ difference([1, 2, 5, 3, 4, 10], [1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == [10] + + assert list( + template.Template( + "{{ [1, 2, 5, 3, 4, 10] | difference([1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == [10] + + assert list( + template.Template( + "{{ difference(['a', 'b', 'c'], ['b', 'c', 'd']) }}", hass + ).async_render() + ) == ["a"] + + assert list( + template.Template( + "{{ ['a', 'b', 'c'] | difference(['b', 'c', 'd']) }}", hass + ).async_render() + ) == ["a"] + + assert ( + template.Template("{{ difference([], [1, 2, 3]) }}", hass).async_render() == [] + ) + + assert ( + template.Template("{{ [] | difference([1, 2, 3]) }}", hass).async_render() == [] + ) + + with pytest.raises(TemplateError, match="difference expected a list, got str"): + template.Template("{{ 'string' | difference([1, 2, 3]) }}", hass).async_render() + + with pytest.raises(TemplateError, match="difference expected a list, got str"): + template.Template("{{ [1, 2, 3] | difference('string') }}", hass).async_render() + + +def test_union(hass: HomeAssistant) -> None: + """Test the union function and filter.""" + assert list( + template.Template( + "{{ union([1, 2, 5, 3, 4, 10], [1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == unordered([1, 2, 3, 4, 5, 10, 11, 99]) + + assert list( + template.Template( + "{{ [1, 2, 5, 3, 4, 10] | union([1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == unordered([1, 2, 3, 4, 5, 10, 11, 99]) + + assert list( + template.Template( + "{{ union(['a', 'b', 'c'], ['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["a", "b", "c", "d"]) + + assert list( + template.Template( + "{{ ['a', 'b', 'c'] | union(['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["a", "b", "c", "d"]) + + assert list( + template.Template("{{ union([], [1, 2, 3]) }}", hass).async_render() + ) == unordered([1, 2, 3]) + + assert list( + template.Template("{{ [] | union([1, 2, 3]) }}", hass).async_render() + ) == unordered([1, 2, 3]) + + with pytest.raises(TemplateError, match="union expected a list, got str"): + template.Template("{{ 'string' | union([1, 2, 3]) }}", hass).async_render() + + with pytest.raises(TemplateError, match="union expected a list, got str"): + template.Template("{{ [1, 2, 3] | union('string') }}", hass).async_render() + + +def test_symmetric_difference(hass: HomeAssistant) -> None: + """Test the symmetric_difference function and filter.""" + assert list( + 
template.Template( + "{{ symmetric_difference([1, 2, 5, 3, 4, 10], [1, 2, 3, 4, 5, 11, 99]) }}", + hass, + ).async_render() + ) == unordered([10, 11, 99]) + + assert list( + template.Template( + "{{ [1, 2, 5, 3, 4, 10] | symmetric_difference([1, 2, 3, 4, 5, 11, 99]) }}", + hass, + ).async_render() + ) == unordered([10, 11, 99]) + + assert list( + template.Template( + "{{ symmetric_difference(['a', 'b', 'c'], ['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["a", "d"]) + + assert list( + template.Template( + "{{ ['a', 'b', 'c'] | symmetric_difference(['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["a", "d"]) + + assert list( + template.Template( + "{{ symmetric_difference([], [1, 2, 3]) }}", hass + ).async_render() + ) == unordered([1, 2, 3]) + + assert list( + template.Template( + "{{ [] | symmetric_difference([1, 2, 3]) }}", hass + ).async_render() + ) == unordered([1, 2, 3]) + + with pytest.raises( + TemplateError, match="symmetric_difference expected a list, got str" + ): + template.Template( + "{{ 'string' | symmetric_difference([1, 2, 3]) }}", hass + ).async_render() + + with pytest.raises( + TemplateError, match="symmetric_difference expected a list, got str" + ): + template.Template( + "{{ [1, 2, 3] | symmetric_difference('string') }}", hass + ).async_render() + + def test_md5(hass: HomeAssistant) -> None: """Test the md5 function and filter.""" assert ( diff --git a/tests/test_data_entry_flow.py b/tests/test_data_entry_flow.py index 74a55cb4989..86ba5257001 100644 --- a/tests/test_data_entry_flow.py +++ b/tests/test_data_entry_flow.py @@ -133,6 +133,61 @@ async def test_show_form(manager: MockFlowManager) -> None: assert form["errors"] == {"username": "Should be unique."} +async def test_form_shows_with_added_suggested_values(manager: MockFlowManager) -> None: + """Test that we can show a form with suggested values.""" + schema = vol.Schema( + { + vol.Required("username"): str, + vol.Required("password"): str, + vol.Required("section_1"): data_entry_flow.section( + vol.Schema( + { + vol.Optional("full_name"): str, + } + ), + {"collapsed": False}, + ), + } + ) + + @manager.mock_reg_handler("test") + class TestFlow(data_entry_flow.FlowHandler): + async def async_step_init(self, user_input=None): + data_schema = self.add_suggested_values_to_schema( + schema, + { + "username": "doej", + "password": "verySecret1", + "section_1": {"full_name": "John Doe"}, + }, + ) + return self.async_show_form( + step_id="init", + data_schema=data_schema, + ) + + form = await manager.async_init("test") + assert form["type"] == data_entry_flow.FlowResultType.FORM + assert form["data_schema"].schema == schema.schema + markers = list(form["data_schema"].schema) + assert len(markers) == 3 + assert markers[0] == "username" + assert markers[0].description == {"suggested_value": "doej"} + assert markers[1] == "password" + assert markers[1].description == {"suggested_value": "verySecret1"} + assert markers[2] == "section_1" + section_validator = form["data_schema"].schema["section_1"] + assert isinstance(section_validator, data_entry_flow.section) + # The section class was not replaced + assert section_validator is schema.schema["section_1"] + # The section schema was not replaced + assert section_validator.schema is schema.schema["section_1"].schema + section_markers = list(section_validator.schema.schema) + assert len(section_markers) == 1 + assert section_markers[0] == "full_name" + assert section_markers[0].description == {"suggested_value": "John Doe"} + + async def 
test_abort_removes_instance(manager: MockFlowManager) -> None: """Test that abort removes the flow from progress."""