diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 9a710eb3fb7..be4ca304950 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -32,7 +32,7 @@ jobs: fetch-depth: 0 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -116,7 +116,7 @@ jobs: - name: Set up Python ${{ env.DEFAULT_PYTHON }} if: needs.init.outputs.channel == 'dev' - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -453,7 +453,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ env.DEFAULT_PYTHON }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 65eb1169381..caad898028c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -229,7 +229,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -274,7 +274,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -314,7 +314,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -353,7 +353,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -448,7 +448,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -532,7 +532,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -564,7 +564,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -595,7 +595,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -637,7 +637,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -682,7 +682,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: 
python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -726,7 +726,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -800,7 +800,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -863,7 +863,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -981,7 +981,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -1106,7 +1106,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -1251,7 +1251,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ matrix.python-version }} check-latest: true diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index ef360b2124b..6e3a869cce0 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.1.7 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.25.11 + uses: github/codeql-action/init@v3.25.12 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.25.11 + uses: github/codeql-action/analyze@v3.25.12 with: category: "/language:python" diff --git a/.github/workflows/translations.yml b/.github/workflows/translations.yml index 318a1898987..0ab95510480 100644 --- a/.github/workflows/translations.yml +++ b/.github/workflows/translations.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ env.DEFAULT_PYTHON }} diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 6d2ca187eac..b74406b9c82 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -36,7 +36,7 @@ jobs: - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.0 + uses: actions/setup-python@v5.1.1 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f9e7e8d683d..0f1766dc196 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.1 + rev: v0.5.2 hooks: - id: ruff args: diff --git a/.strict-typing b/.strict-typing index 7d6bd1286af..1707f0ca9c3 100644 --- a/.strict-typing +++ b/.strict-typing @@ -97,6 +97,7 @@ homeassistant.components.assist_pipeline.* homeassistant.components.asterisk_cdr.* homeassistant.components.asterisk_mbox.* 
homeassistant.components.asuswrt.* +homeassistant.components.autarco.* homeassistant.components.auth.* homeassistant.components.automation.* homeassistant.components.awair.* diff --git a/CODEOWNERS b/CODEOWNERS index 1cea777f782..a567d1748ad 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -155,6 +155,8 @@ build.json @home-assistant/supervisor /tests/components/aurora_abb_powerone/ @davet2001 /homeassistant/components/aussie_broadband/ @nickw444 @Bre77 /tests/components/aussie_broadband/ @nickw444 @Bre77 +/homeassistant/components/autarco/ @klaasnicolaas +/tests/components/autarco/ @klaasnicolaas /homeassistant/components/auth/ @home-assistant/core /tests/components/auth/ @home-assistant/core /homeassistant/components/automation/ @home-assistant/core @@ -706,6 +708,8 @@ build.json @home-assistant/supervisor /tests/components/isal/ @bdraco /homeassistant/components/islamic_prayer_times/ @engrbm87 @cpfair /tests/components/islamic_prayer_times/ @engrbm87 @cpfair +/homeassistant/components/israel_rail/ @shaiu +/tests/components/israel_rail/ @shaiu /homeassistant/components/iss/ @DurgNomis-drol /tests/components/iss/ @DurgNomis-drol /homeassistant/components/ista_ecotrend/ @tr4nt0r @@ -1209,6 +1213,8 @@ build.json @home-assistant/supervisor /tests/components/rtsp_to_webrtc/ @allenporter /homeassistant/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565 /tests/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565 +/homeassistant/components/russound_rio/ @noahhusby +/tests/components/russound_rio/ @noahhusby /homeassistant/components/ruuvi_gateway/ @akx /tests/components/ruuvi_gateway/ @akx /homeassistant/components/ruuvitag_ble/ @akx diff --git a/homeassistant/components/airzone/binary_sensor.py b/homeassistant/components/airzone/binary_sensor.py index 20878c08b82..eec78156fe0 100644 --- a/homeassistant/components/airzone/binary_sensor.py +++ b/homeassistant/components/airzone/binary_sensor.py @@ -82,33 +82,54 @@ async def async_setup_entry( """Add Airzone binary sensors from a config_entry.""" coordinator = entry.runtime_data - binary_sensors: list[AirzoneBinarySensor] = [ - AirzoneSystemBinarySensor( - coordinator, - description, - entry, - system_id, - system_data, - ) - for system_id, system_data in coordinator.data[AZD_SYSTEMS].items() - for description in SYSTEM_BINARY_SENSOR_TYPES - if description.key in system_data - ] + added_systems: set[str] = set() + added_zones: set[str] = set() - binary_sensors.extend( - AirzoneZoneBinarySensor( - coordinator, - description, - entry, - system_zone_id, - zone_data, - ) - for system_zone_id, zone_data in coordinator.data[AZD_ZONES].items() - for description in ZONE_BINARY_SENSOR_TYPES - if description.key in zone_data - ) + def _async_entity_listener() -> None: + """Handle additions of binary sensors.""" - async_add_entities(binary_sensors) + entities: list[AirzoneBinarySensor] = [] + + systems_data = coordinator.data.get(AZD_SYSTEMS, {}) + received_systems = set(systems_data) + new_systems = received_systems - added_systems + if new_systems: + entities.extend( + AirzoneSystemBinarySensor( + coordinator, + description, + entry, + system_id, + systems_data.get(system_id), + ) + for system_id in new_systems + for description in SYSTEM_BINARY_SENSOR_TYPES + if description.key in systems_data.get(system_id) + ) + added_systems.update(new_systems) + + zones_data = coordinator.data.get(AZD_ZONES, {}) + received_zones = set(zones_data) + new_zones = received_zones - added_zones + if new_zones: + entities.extend( + AirzoneZoneBinarySensor( + coordinator, + 
description, + entry, + system_zone_id, + zones_data.get(system_zone_id), + ) + for system_zone_id in new_zones + for description in ZONE_BINARY_SENSOR_TYPES + if description.key in zones_data.get(system_zone_id) + ) + added_zones.update(new_zones) + + async_add_entities(entities) + + entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener)) + _async_entity_listener() class AirzoneBinarySensor(AirzoneEntity, BinarySensorEntity): diff --git a/homeassistant/components/airzone/climate.py b/homeassistant/components/airzone/climate.py index 33c84b67501..5e5e1c126de 100644 --- a/homeassistant/components/airzone/climate.py +++ b/homeassistant/components/airzone/climate.py @@ -102,17 +102,31 @@ async def async_setup_entry( entry: AirzoneConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: - """Add Airzone sensors from a config_entry.""" + """Add Airzone climate from a config_entry.""" coordinator = entry.runtime_data - async_add_entities( - AirzoneClimate( - coordinator, - entry, - system_zone_id, - zone_data, - ) - for system_zone_id, zone_data in coordinator.data[AZD_ZONES].items() - ) + + added_zones: set[str] = set() + + def _async_entity_listener() -> None: + """Handle additions of climate.""" + + zones_data = coordinator.data.get(AZD_ZONES, {}) + received_zones = set(zones_data) + new_zones = received_zones - added_zones + if new_zones: + async_add_entities( + AirzoneClimate( + coordinator, + entry, + system_zone_id, + zones_data.get(system_zone_id), + ) + for system_zone_id in new_zones + ) + added_zones.update(new_zones) + + entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener)) + _async_entity_listener() class AirzoneClimate(AirzoneZoneEntity, ClimateEntity): diff --git a/homeassistant/components/airzone/manifest.json b/homeassistant/components/airzone/manifest.json index 889170e31d7..0a5b4b891aa 100644 --- a/homeassistant/components/airzone/manifest.json +++ b/homeassistant/components/airzone/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone", "iot_class": "local_polling", "loggers": ["aioairzone"], - "requirements": ["aioairzone==0.7.7"] + "requirements": ["aioairzone==0.8.0"] } diff --git a/homeassistant/components/airzone/select.py b/homeassistant/components/airzone/select.py index 8ffe86851b8..493150e5c6a 100644 --- a/homeassistant/components/airzone/select.py +++ b/homeassistant/components/airzone/select.py @@ -83,21 +83,34 @@ async def async_setup_entry( entry: AirzoneConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: - """Add Airzone sensors from a config_entry.""" + """Add Airzone select from a config_entry.""" coordinator = entry.runtime_data - async_add_entities( - AirzoneZoneSelect( - coordinator, - description, - entry, - system_zone_id, - zone_data, - ) - for description in ZONE_SELECT_TYPES - for system_zone_id, zone_data in coordinator.data[AZD_ZONES].items() - if description.key in zone_data - ) + added_zones: set[str] = set() + + def _async_entity_listener() -> None: + """Handle additions of select.""" + + zones_data = coordinator.data.get(AZD_ZONES, {}) + received_zones = set(zones_data) + new_zones = received_zones - added_zones + if new_zones: + async_add_entities( + AirzoneZoneSelect( + coordinator, + description, + entry, + system_zone_id, + zones_data.get(system_zone_id), + ) + for system_zone_id in new_zones + for description in ZONE_SELECT_TYPES + if description.key in zones_data.get(system_zone_id) + ) + added_zones.update(new_zones) + + 
entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener)) + _async_entity_listener() class AirzoneBaseSelect(AirzoneEntity, SelectEntity): diff --git a/homeassistant/components/airzone/sensor.py b/homeassistant/components/airzone/sensor.py index 7cba0dc515c..ef8ddbb3b65 100644 --- a/homeassistant/components/airzone/sensor.py +++ b/homeassistant/components/airzone/sensor.py @@ -85,21 +85,37 @@ async def async_setup_entry( """Add Airzone sensors from a config_entry.""" coordinator = entry.runtime_data - sensors: list[AirzoneSensor] = [ - AirzoneZoneSensor( - coordinator, - description, - entry, - system_zone_id, - zone_data, - ) - for system_zone_id, zone_data in coordinator.data[AZD_ZONES].items() - for description in ZONE_SENSOR_TYPES - if description.key in zone_data - ] + added_zones: set[str] = set() + + def _async_entity_listener() -> None: + """Handle additions of sensors.""" + + entities: list[AirzoneSensor] = [] + + zones_data = coordinator.data.get(AZD_ZONES, {}) + received_zones = set(zones_data) + new_zones = received_zones - added_zones + if new_zones: + entities.extend( + AirzoneZoneSensor( + coordinator, + description, + entry, + system_zone_id, + zones_data.get(system_zone_id), + ) + for system_zone_id in new_zones + for description in ZONE_SENSOR_TYPES + if description.key in zones_data.get(system_zone_id) + ) + added_zones.update(new_zones) + + async_add_entities(entities) + + entities: list[AirzoneSensor] = [] if AZD_HOT_WATER in coordinator.data: - sensors.extend( + entities.extend( AirzoneHotWaterSensor( coordinator, description, @@ -110,7 +126,7 @@ async def async_setup_entry( ) if AZD_WEBSERVER in coordinator.data: - sensors.extend( + entities.extend( AirzoneWebServerSensor( coordinator, description, @@ -120,7 +136,10 @@ async def async_setup_entry( if description.key in coordinator.data[AZD_WEBSERVER] ) - async_add_entities(sensors) + async_add_entities(entities) + + entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener)) + _async_entity_listener() class AirzoneSensor(AirzoneEntity, SensorEntity): diff --git a/homeassistant/components/airzone/water_heater.py b/homeassistant/components/airzone/water_heater.py index ed1c2069c27..8fd563b33d8 100644 --- a/homeassistant/components/airzone/water_heater.py +++ b/homeassistant/components/airzone/water_heater.py @@ -61,7 +61,7 @@ async def async_setup_entry( entry: AirzoneConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: - """Add Airzone sensors from a config_entry.""" + """Add Airzone Water Heater from a config_entry.""" coordinator = entry.runtime_data if AZD_HOT_WATER in coordinator.data: async_add_entities([AirzoneWaterHeater(coordinator, entry)]) diff --git a/homeassistant/components/airzone_cloud/manifest.json b/homeassistant/components/airzone_cloud/manifest.json index 555514ecf2a..e317dd82366 100644 --- a/homeassistant/components/airzone_cloud/manifest.json +++ b/homeassistant/components/airzone_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone_cloud", "iot_class": "cloud_push", "loggers": ["aioairzone_cloud"], - "requirements": ["aioairzone-cloud==0.5.3"] + "requirements": ["aioairzone-cloud==0.5.4"] } diff --git a/homeassistant/components/alexa/handlers.py b/homeassistant/components/alexa/handlers.py index 6df4beccdc8..53bf6702138 100644 --- a/homeassistant/components/alexa/handlers.py +++ b/homeassistant/components/alexa/handlers.py @@ -1513,7 +1513,7 @@ async def async_api_adjust_range( if instance == 
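
The Airzone platforms in this PR (binary sensor, climate, select, sensor) switch from creating all entities once at setup to a coordinator listener that, on each refresh, compares the systems and zones reported by the device against a set of already-added IDs and only creates entities for the new ones; the listener is registered through `entry.async_on_unload(...)` so it is removed when the entry unloads. Below is a framework-free sketch of that pattern; `Coordinator`, `ZoneEntity`, and `setup` are simplified stand-ins invented for illustration, not Home Assistant or aioairzone classes.

```python
# Minimal, framework-free sketch of the "add new entities on refresh" pattern
# used by the Airzone platforms in this PR. Coordinator and ZoneEntity are
# illustrative stand-ins, not Home Assistant classes.
from __future__ import annotations

from collections.abc import Callable


class Coordinator:
    """Holds the latest data and notifies listeners after each refresh."""

    def __init__(self) -> None:
        self.data: dict[str, dict[str, dict]] = {"zones": {}}
        self._listeners: list[Callable[[], None]] = []

    def async_add_listener(self, listener: Callable[[], None]) -> Callable[[], None]:
        self._listeners.append(listener)
        return lambda: self._listeners.remove(listener)

    def set_data(self, zones: dict[str, dict]) -> None:
        self.data = {"zones": zones}
        for listener in list(self._listeners):
            listener()


class ZoneEntity:
    def __init__(self, zone_id: str, zone_data: dict) -> None:
        self.zone_id = zone_id
        self.zone_data = zone_data


def setup(coordinator: Coordinator, add_entities: Callable[[list[ZoneEntity]], None]) -> None:
    added_zones: set[str] = set()

    def _listener() -> None:
        zones = coordinator.data.get("zones", {})
        new_zones = set(zones) - added_zones  # only zones we have not seen yet
        if new_zones:
            add_entities([ZoneEntity(zone_id, zones[zone_id]) for zone_id in new_zones])
            added_zones.update(new_zones)

    coordinator.async_add_listener(_listener)
    _listener()  # pick up zones already present at setup time


coordinator = Coordinator()
setup(coordinator, lambda entities: print("added:", [e.zone_id for e in entities]))
coordinator.set_data({"1:1": {}})             # first refresh adds zone 1:1
coordinator.set_data({"1:1": {}, "1:2": {}})  # later refresh adds only zone 1:2
```

Calling the listener once right after registering it covers devices already present at setup time, which matches the immediate `_async_entity_listener()` call in the hunks above.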
f"{cover.DOMAIN}.{cover.ATTR_POSITION}": range_delta = int(range_delta * 20) if range_delta_default else int(range_delta) service = SERVICE_SET_COVER_POSITION - if not (current := entity.attributes.get(cover.ATTR_POSITION)): + if not (current := entity.attributes.get(cover.ATTR_CURRENT_POSITION)): msg = f"Unable to determine {entity.entity_id} current position" raise AlexaInvalidValueError(msg) position = response_value = min(100, max(0, range_delta + current)) diff --git a/homeassistant/components/amazon_polly/manifest.json b/homeassistant/components/amazon_polly/manifest.json index 73bbdd67162..b057967d1e2 100644 --- a/homeassistant/components/amazon_polly/manifest.json +++ b/homeassistant/components/amazon_polly/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/amazon_polly", "iot_class": "cloud_push", "loggers": ["boto3", "botocore", "s3transfer"], - "requirements": ["boto3==1.34.51"] + "requirements": ["boto3==1.34.131"] } diff --git a/homeassistant/components/analytics_insights/manifest.json b/homeassistant/components/analytics_insights/manifest.json index adf2d634ef8..3c484d36df7 100644 --- a/homeassistant/components/analytics_insights/manifest.json +++ b/homeassistant/components/analytics_insights/manifest.json @@ -7,6 +7,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["python_homeassistant_analytics"], - "requirements": ["python-homeassistant-analytics==0.6.0"], + "requirements": ["python-homeassistant-analytics==0.7.0"], "single_config_entry": true } diff --git a/homeassistant/components/androidtv/strings.json b/homeassistant/components/androidtv/strings.json index d6fdf78d1fb..3032e9ac6ef 100644 --- a/homeassistant/components/androidtv/strings.json +++ b/homeassistant/components/androidtv/strings.json @@ -101,7 +101,7 @@ }, "learn_sendevent": { "name": "Learn sendevent", - "description": "Translates a key press on a remote into ADB 'sendevent' commands. You must press one button on the remote within 8 seconds of calling this service." + "description": "Translates a key press on a remote into ADB 'sendevent' commands. You must press one button on the remote within 8 seconds of performing this action." 
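
The Alexa `AdjustRangeValue` fix above reads the cover's `current_position` attribute (instead of the target `position` attribute) before applying the requested delta. A small sketch of the arithmetic taken from that hunk: a "default" delta is scaled by a factor of 20, and the result is clamped to the 0–100 position range. The helper name and sample values here are illustrative, not part of the integration.

```python
# Sketch of the position arithmetic in async_api_adjust_range for covers,
# based on the hunk above: a "default" delta is scaled by 20 and the result
# is clamped to the 0..100 position range.
def adjust_cover_position(current: int, range_delta: float, range_delta_default: bool) -> int:
    delta = int(range_delta * 20) if range_delta_default else int(range_delta)
    return min(100, max(0, delta + current))


print(adjust_cover_position(50, 1, True))     # default step: 50 + 20 -> 70
print(adjust_cover_position(90, 30, False))   # explicit delta: clamped to 100
print(adjust_cover_position(10, -25, False))  # negative delta: clamped to 0
```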
} }, "exceptions": { diff --git a/homeassistant/components/aquacell/manifest.json b/homeassistant/components/aquacell/manifest.json index 559bdf345bb..de4a9986d6e 100644 --- a/homeassistant/components/aquacell/manifest.json +++ b/homeassistant/components/aquacell/manifest.json @@ -8,5 +8,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "loggers": ["aioaquacell"], - "requirements": ["aioaquacell==0.1.8"] + "requirements": ["aioaquacell==0.2.0"] } diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 83d0e985b8a..293c94c9629 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==6.4.2", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==6.4.3", "yalexs-ble==2.4.3"] } diff --git a/homeassistant/components/autarco/__init__.py b/homeassistant/components/autarco/__init__.py new file mode 100644 index 00000000000..0e29b25ad80 --- /dev/null +++ b/homeassistant/components/autarco/__init__.py @@ -0,0 +1,49 @@ +"""The Autarco integration.""" + +from __future__ import annotations + +import asyncio + +from autarco import Autarco + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .coordinator import AutarcoDataUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.SENSOR] + +type AutarcoConfigEntry = ConfigEntry[list[AutarcoDataUpdateCoordinator]] + + +async def async_setup_entry(hass: HomeAssistant, entry: AutarcoConfigEntry) -> bool: + """Set up Autarco from a config entry.""" + client = Autarco( + email=entry.data[CONF_EMAIL], + password=entry.data[CONF_PASSWORD], + session=async_get_clientsession(hass), + ) + account_sites = await client.get_account() + + coordinators: list[AutarcoDataUpdateCoordinator] = [ + AutarcoDataUpdateCoordinator(hass, client, site) for site in account_sites + ] + + await asyncio.gather( + *[ + coordinator.async_config_entry_first_refresh() + for coordinator in coordinators + ] + ) + + entry.runtime_data = coordinators + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: AutarcoConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/autarco/config_flow.py b/homeassistant/components/autarco/config_flow.py new file mode 100644 index 00000000000..a66f14047a7 --- /dev/null +++ b/homeassistant/components/autarco/config_flow.py @@ -0,0 +1,57 @@ +"""Config flow for Autarco integration.""" + +from __future__ import annotations + +from typing import Any + +from autarco import Autarco, AutarcoAuthenticationError, AutarcoConnectionError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import DOMAIN + +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_EMAIL): str, + vol.Required(CONF_PASSWORD): str, + } +) + + +class AutarcoConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config 
flow for Autarco.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input is not None: + self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) + client = Autarco( + email=user_input[CONF_EMAIL], + password=user_input[CONF_PASSWORD], + session=async_get_clientsession(self.hass), + ) + try: + await client.get_account() + except AutarcoAuthenticationError: + errors["base"] = "invalid_auth" + except AutarcoConnectionError: + errors["base"] = "cannot_connect" + else: + return self.async_create_entry( + title=user_input[CONF_EMAIL], + data={ + CONF_EMAIL: user_input[CONF_EMAIL], + CONF_PASSWORD: user_input[CONF_PASSWORD], + }, + ) + return self.async_show_form( + step_id="user", + errors=errors, + data_schema=DATA_SCHEMA, + ) diff --git a/homeassistant/components/autarco/const.py b/homeassistant/components/autarco/const.py new file mode 100644 index 00000000000..45a2825e793 --- /dev/null +++ b/homeassistant/components/autarco/const.py @@ -0,0 +1,11 @@ +"""Constants for the Autarco integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging +from typing import Final + +DOMAIN: Final = "autarco" +LOGGER = logging.getLogger(__package__) +SCAN_INTERVAL = timedelta(minutes=5) diff --git a/homeassistant/components/autarco/coordinator.py b/homeassistant/components/autarco/coordinator.py new file mode 100644 index 00000000000..82eb4439a86 --- /dev/null +++ b/homeassistant/components/autarco/coordinator.py @@ -0,0 +1,49 @@ +"""Coordinator for Autarco integration.""" + +from __future__ import annotations + +from typing import NamedTuple + +from autarco import AccountSite, Autarco, Inverter, Solar + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import DOMAIN, LOGGER, SCAN_INTERVAL + + +class AutarcoData(NamedTuple): + """Class for defining data in dict.""" + + solar: Solar + inverters: dict[str, Inverter] + + +class AutarcoDataUpdateCoordinator(DataUpdateCoordinator[AutarcoData]): + """Class to manage fetching Autarco data from the API.""" + + config_entry: ConfigEntry + + def __init__( + self, + hass: HomeAssistant, + client: Autarco, + site: AccountSite, + ) -> None: + """Initialize global Autarco data updater.""" + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + self.client = client + self.site = site + + async def _async_update_data(self) -> AutarcoData: + """Fetch data from Autarco API.""" + return AutarcoData( + solar=await self.client.get_solar(self.site.public_key), + inverters=await self.client.get_inverters(self.site.public_key), + ) diff --git a/homeassistant/components/autarco/diagnostics.py b/homeassistant/components/autarco/diagnostics.py new file mode 100644 index 00000000000..d1b082fd307 --- /dev/null +++ b/homeassistant/components/autarco/diagnostics.py @@ -0,0 +1,43 @@ +"""Support for the Autarco diagnostics.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . 
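
In the new Autarco `__init__.py` above, setup creates one data-update coordinator per site returned by `client.get_account()` and runs their first refreshes concurrently with `asyncio.gather`, storing the resulting list in `entry.runtime_data`. Below is a minimal standalone sketch of that shape; `SiteCoordinator` and the site names are stand-ins invented for illustration rather than the real Autarco or Home Assistant objects.

```python
# Standalone sketch of the per-site coordinator setup used in the Autarco
# __init__.py above: one coordinator per account site, with the first
# refreshes run concurrently. SiteCoordinator and the site names are
# illustrative stand-ins.
import asyncio


class SiteCoordinator:
    def __init__(self, public_key: str) -> None:
        self.public_key = public_key
        self.data: str | None = None

    async def async_config_entry_first_refresh(self) -> None:
        await asyncio.sleep(0)  # stands in for the first solar/inverter API calls
        self.data = f"data for {self.public_key}"


async def async_setup_entry(sites: list[str]) -> list[SiteCoordinator]:
    coordinators = [SiteCoordinator(site) for site in sites]
    await asyncio.gather(
        *(coordinator.async_config_entry_first_refresh() for coordinator in coordinators)
    )
    # The real integration stores this list in entry.runtime_data.
    return coordinators


coordinators = asyncio.run(async_setup_entry(["site-a", "site-b"]))
print([coordinator.data for coordinator in coordinators])
```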
import AutarcoConfigEntry, AutarcoDataUpdateCoordinator + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: AutarcoConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + autarco_data: list[AutarcoDataUpdateCoordinator] = config_entry.runtime_data + + return { + "sites_data": [ + { + "id": coordinator.site.site_id, + "name": coordinator.site.system_name, + "health": coordinator.site.health, + "solar": { + "power_production": coordinator.data.solar.power_production, + "energy_production_today": coordinator.data.solar.energy_production_today, + "energy_production_month": coordinator.data.solar.energy_production_month, + "energy_production_total": coordinator.data.solar.energy_production_total, + }, + "inverters": [ + { + "serial_number": inverter.serial_number, + "out_ac_power": inverter.out_ac_power, + "out_ac_energy_total": inverter.out_ac_energy_total, + "grid_turned_off": inverter.grid_turned_off, + "health": inverter.health, + } + for inverter in coordinator.data.inverters.values() + ], + } + for coordinator in autarco_data + ], + } diff --git a/homeassistant/components/autarco/manifest.json b/homeassistant/components/autarco/manifest.json new file mode 100644 index 00000000000..f0900472b1e --- /dev/null +++ b/homeassistant/components/autarco/manifest.json @@ -0,0 +1,9 @@ +{ + "domain": "autarco", + "name": "Autarco", + "codeowners": ["@klaasnicolaas"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/autarco", + "iot_class": "cloud_polling", + "requirements": ["autarco==2.0.0"] +} diff --git a/homeassistant/components/autarco/sensor.py b/homeassistant/components/autarco/sensor.py new file mode 100644 index 00000000000..2352cdee060 --- /dev/null +++ b/homeassistant/components/autarco/sensor.py @@ -0,0 +1,189 @@ +"""Support for Autarco sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from autarco import Inverter, Solar + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfEnergy, UnitOfPower +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . import AutarcoConfigEntry +from .const import DOMAIN +from .coordinator import AutarcoDataUpdateCoordinator + + +@dataclass(frozen=True, kw_only=True) +class AutarcoSolarSensorEntityDescription(SensorEntityDescription): + """Describes an Autarco sensor entity.""" + + value_fn: Callable[[Solar], StateType] + + +SENSORS_SOLAR: tuple[AutarcoSolarSensorEntityDescription, ...] 
= ( + AutarcoSolarSensorEntityDescription( + key="power_production", + translation_key="power_production", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda solar: solar.power_production, + ), + AutarcoSolarSensorEntityDescription( + key="energy_production_today", + translation_key="energy_production_today", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + value_fn=lambda solar: solar.energy_production_today, + ), + AutarcoSolarSensorEntityDescription( + key="energy_production_month", + translation_key="energy_production_month", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + value_fn=lambda solar: solar.energy_production_month, + ), + AutarcoSolarSensorEntityDescription( + key="energy_production_total", + translation_key="energy_production_total", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda solar: solar.energy_production_total, + ), +) + + +@dataclass(frozen=True, kw_only=True) +class AutarcoInverterSensorEntityDescription(SensorEntityDescription): + """Describes an Autarco inverter sensor entity.""" + + value_fn: Callable[[Inverter], StateType] + + +SENSORS_INVERTER: tuple[AutarcoInverterSensorEntityDescription, ...] = ( + AutarcoInverterSensorEntityDescription( + key="out_ac_power", + translation_key="out_ac_power", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda inverter: inverter.out_ac_power, + ), + AutarcoInverterSensorEntityDescription( + key="out_ac_energy_total", + translation_key="out_ac_energy_total", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda inverter: inverter.out_ac_energy_total, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: AutarcoConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Autarco sensors based on a config entry.""" + entities: list[SensorEntity] = [] + for coordinator in entry.runtime_data: + entities.extend( + AutarcoSolarSensorEntity( + coordinator=coordinator, + description=description, + ) + for description in SENSORS_SOLAR + ) + entities.extend( + AutarcoInverterSensorEntity( + coordinator=coordinator, + description=description, + serial_number=inverter, + ) + for description in SENSORS_INVERTER + for inverter in coordinator.data.inverters + ) + async_add_entities(entities) + + +class AutarcoSolarSensorEntity( + CoordinatorEntity[AutarcoDataUpdateCoordinator], SensorEntity +): + """Defines an Autarco solar sensor.""" + + entity_description: AutarcoSolarSensorEntityDescription + _attr_has_entity_name = True + + def __init__( + self, + *, + coordinator: AutarcoDataUpdateCoordinator, + description: AutarcoSolarSensorEntityDescription, + ) -> None: + """Initialize Autarco sensor.""" + super().__init__(coordinator) + + self.entity_description = description + self._attr_unique_id = f"{coordinator.site.site_id}_solar_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, f"{coordinator.site.site_id}_solar")}, + entry_type=DeviceEntryType.SERVICE, + manufacturer="Autarco", + name="Solar", + ) + + @property + def native_value(self) -> StateType: + 
"""Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data.solar) + + +class AutarcoInverterSensorEntity( + CoordinatorEntity[AutarcoDataUpdateCoordinator], SensorEntity +): + """Defines an Autarco inverter sensor.""" + + entity_description: AutarcoInverterSensorEntityDescription + _attr_has_entity_name = True + + def __init__( + self, + *, + coordinator: AutarcoDataUpdateCoordinator, + description: AutarcoInverterSensorEntityDescription, + serial_number: str, + ) -> None: + """Initialize Autarco sensor.""" + super().__init__(coordinator) + + self.entity_description = description + self._serial_number = serial_number + self._attr_unique_id = f"{serial_number}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, serial_number)}, + name=f"Inverter {serial_number}", + manufacturer="Autarco", + model="Inverter", + serial_number=serial_number, + ) + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return self.entity_description.value_fn( + self.coordinator.data.inverters[self._serial_number] + ) diff --git a/homeassistant/components/autarco/strings.json b/homeassistant/components/autarco/strings.json new file mode 100644 index 00000000000..2eff962a13a --- /dev/null +++ b/homeassistant/components/autarco/strings.json @@ -0,0 +1,46 @@ +{ + "config": { + "step": { + "user": { + "description": "Connect to your Autarco account to get information about your solar panels.", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "The email address of your Autarco account.", + "password": "The password of your Autarco account." + } + } + }, + "error": { + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "entity": { + "sensor": { + "power_production": { + "name": "Power production" + }, + "energy_production_today": { + "name": "Energy production today" + }, + "energy_production_month": { + "name": "Energy production month" + }, + "energy_production_total": { + "name": "Energy production total" + }, + "out_ac_power": { + "name": "Power AC output" + }, + "out_ac_energy_total": { + "name": "Energy AC output total" + } + } + } +} diff --git a/homeassistant/components/automation/strings.json b/homeassistant/components/automation/strings.json index c0750a38ca8..3d6dcbb4474 100644 --- a/homeassistant/components/automation/strings.json +++ b/homeassistant/components/automation/strings.json @@ -37,12 +37,12 @@ }, "issues": { "service_not_found": { - "title": "{name} uses an unknown service", + "title": "{name} uses an unknown action", "fix_flow": { "step": { "confirm": { "title": "[%key:component::automation::issues::service_not_found::title%]", - "description": "The automation \"{name}\" (`{entity_id}`) has an action that calls an unknown service: `{service}`.\n\nThis error prevents the automation from running correctly. Maybe this service is no longer available, or perhaps a typo caused it.\n\nTo fix this error, [edit the automation]({edit}) and remove the action that calls this service.\n\nClick on SUBMIT below to confirm you have fixed this automation." 
+ "description": "The automation \"{name}\" (`{entity_id}`) has an unknown action: `{service}`.\n\nThis error prevents the automation from running correctly. Maybe this action is no longer available, or perhaps a typo caused it.\n\nTo fix this error, [edit the automation]({edit}) and remove the action that calls this action.\n\nClick on SUBMIT below to confirm you have fixed this automation." } } } diff --git a/homeassistant/components/aws/manifest.json b/homeassistant/components/aws/manifest.json index afc1b4c6c64..6238bffce36 100644 --- a/homeassistant/components/aws/manifest.json +++ b/homeassistant/components/aws/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/aws", "iot_class": "cloud_push", "loggers": ["aiobotocore", "botocore"], - "requirements": ["aiobotocore==2.13.0"] + "requirements": ["aiobotocore==2.13.1", "botocore==1.34.131"] } diff --git a/homeassistant/components/azure_data_explorer/client.py b/homeassistant/components/azure_data_explorer/client.py index 88609ff8e10..9fb22dfe974 100644 --- a/homeassistant/components/azure_data_explorer/client.py +++ b/homeassistant/components/azure_data_explorer/client.py @@ -65,13 +65,18 @@ class AzureDataExplorerClient: ) if data[CONF_USE_QUEUED_CLIENT] is True: - # Queded is the only option supported on free tear of ADX + # Queued is the only option supported on free tier of ADX self.write_client = QueuedIngestClient(kcsb_ingest) else: self.write_client = ManagedStreamingIngestClient.from_dm_kcsb(kcsb_ingest) self.query_client = KustoClient(kcsb_query) + # Reduce the HTTP logging, the default INFO logging is too verbose. + logging.getLogger("azure.core.pipeline.policies.http_logging_policy").setLevel( + logging.WARNING + ) + def test_connection(self) -> None: """Test connection, will throw Exception if it cannot connect.""" @@ -80,7 +85,7 @@ class AzureDataExplorerClient: self.query_client.execute_query(self._database, query) def ingest_data(self, adx_events: str) -> None: - """Send data to Axure Data Explorer.""" + """Send data to Azure Data Explorer.""" bytes_stream = io.StringIO(adx_events) stream_descriptor = StreamDescriptor(bytes_stream) diff --git a/homeassistant/components/bayesian/manifest.json b/homeassistant/components/bayesian/manifest.json index 1d6dcd6080f..df1ab9c7609 100644 --- a/homeassistant/components/bayesian/manifest.json +++ b/homeassistant/components/bayesian/manifest.json @@ -3,6 +3,7 @@ "name": "Bayesian", "codeowners": ["@HarvsG"], "documentation": "https://www.home-assistant.io/integrations/bayesian", + "integration_type": "helper", "iot_class": "local_polling", "quality_scale": "internal" } diff --git a/homeassistant/components/blink/services.py b/homeassistant/components/blink/services.py index 298ead00a45..bb2cbf575dd 100644 --- a/homeassistant/components/blink/services.py +++ b/homeassistant/components/blink/services.py @@ -4,14 +4,13 @@ from __future__ import annotations import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_DEVICE_ID, CONF_PIN from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers import config_validation as cv from .const import ATTR_CONFIG_ENTRY_ID, DOMAIN, SERVICE_SEND_PIN -from .coordinator import 
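
The Azure Data Explorer client change above quiets the Azure SDK's per-request HTTP logging by raising the `azure.core.pipeline.policies.http_logging_policy` logger to WARNING. The snippet below shows that technique in isolation; the log messages are made up for the demonstration.

```python
# Quieting a noisy third-party logger, as done for the Azure SDK's
# http_logging_policy logger in the hunk above. The demo messages are illustrative.
import logging

logging.basicConfig(level=logging.INFO)

noisy = logging.getLogger("azure.core.pipeline.policies.http_logging_policy")
noisy.info("request GET https://example.invalid")  # emitted at INFO

noisy.setLevel(logging.WARNING)                    # raise the threshold for this logger only
noisy.info("request GET https://example.invalid")  # now suppressed
noisy.warning("retrying after 429")                # warnings are still shown
```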
BlinkUpdateCoordinator SERVICE_UPDATE_SCHEMA = vol.Schema( { @@ -29,45 +28,6 @@ SERVICE_SEND_PIN_SCHEMA = vol.Schema( def setup_services(hass: HomeAssistant) -> None: """Set up the services for the Blink integration.""" - def collect_coordinators( - device_ids: list[str], - ) -> list[BlinkUpdateCoordinator]: - config_entries: list[ConfigEntry] = [] - registry = dr.async_get(hass) - for target in device_ids: - device = registry.async_get(target) - if device: - device_entries: list[ConfigEntry] = [] - for entry_id in device.config_entries: - entry = hass.config_entries.async_get_entry(entry_id) - if entry and entry.domain == DOMAIN: - device_entries.append(entry) - if not device_entries: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="invalid_device", - translation_placeholders={"target": target, "domain": DOMAIN}, - ) - config_entries.extend(device_entries) - else: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="device_not_found", - translation_placeholders={"target": target}, - ) - - coordinators: list[BlinkUpdateCoordinator] = [] - for config_entry in config_entries: - if config_entry.state != ConfigEntryState.LOADED: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="not_loaded", - translation_placeholders={"target": config_entry.title}, - ) - - coordinators.append(hass.data[DOMAIN][config_entry.entry_id]) - return coordinators - async def send_pin(call: ServiceCall): """Call blink to send new pin.""" for entry_id in call.data[ATTR_CONFIG_ENTRY_ID]: diff --git a/homeassistant/components/blinksticklight/light.py b/homeassistant/components/blinksticklight/light.py index a789a7e0503..19ac5f80242 100644 --- a/homeassistant/components/blinksticklight/light.py +++ b/homeassistant/components/blinksticklight/light.py @@ -1,10 +1,11 @@ """Support for Blinkstick lights.""" +# mypy: ignore-errors from __future__ import annotations from typing import Any -from blinkstick import blinkstick +# from blinkstick import blinkstick import voluptuous as vol from homeassistant.components.light import ( diff --git a/homeassistant/components/blinksticklight/manifest.json b/homeassistant/components/blinksticklight/manifest.json index e3a6638f2a9..70fac896ff2 100644 --- a/homeassistant/components/blinksticklight/manifest.json +++ b/homeassistant/components/blinksticklight/manifest.json @@ -2,6 +2,7 @@ "domain": "blinksticklight", "name": "BlinkStick", "codeowners": [], + "disabled": "This integration is disabled because it uses non-open source code to operate.", "documentation": "https://www.home-assistant.io/integrations/blinksticklight", "iot_class": "local_polling", "loggers": ["blinkstick"], diff --git a/homeassistant/components/blinksticklight/ruff.toml b/homeassistant/components/blinksticklight/ruff.toml new file mode 100644 index 00000000000..1b02a0b8eaf --- /dev/null +++ b/homeassistant/components/blinksticklight/ruff.toml @@ -0,0 +1,5 @@ +extend = "../../../pyproject.toml" + +lint.extend-ignore = [ + "F821" +] \ No newline at end of file diff --git a/homeassistant/components/bmw_connected_drive/sensor.py b/homeassistant/components/bmw_connected_drive/sensor.py index 15e5c933578..fe0e835622b 100644 --- a/homeassistant/components/bmw_connected_drive/sensor.py +++ b/homeassistant/components/bmw_connected_drive/sensor.py @@ -131,7 +131,7 @@ SENSOR_TYPES: list[BMWSensorEntityDescription] = [ BMWSensorEntityDescription( key="fuel_and_battery.remaining_fuel", translation_key="remaining_fuel", - 
device_class=SensorDeviceClass.VOLUME, + device_class=SensorDeviceClass.VOLUME_STORAGE, native_unit_of_measurement=UnitOfVolume.LITERS, state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=0, diff --git a/homeassistant/components/bring/strings.json b/homeassistant/components/bring/strings.json index 652958a1b1f..e3e700d75f9 100644 --- a/homeassistant/components/bring/strings.json +++ b/homeassistant/components/bring/strings.json @@ -49,7 +49,7 @@ "message": "Authentication failed for {email}, check your email and password" }, "notify_missing_argument_item": { - "message": "Failed to call service {service}. 'URGENT_MESSAGE' requires a value @ data['item']. Got None" + "message": "Failed to perform action {service}. 'URGENT_MESSAGE' requires a value @ data['item']. Got None" }, "notify_request_failed": { "message": "Failed to send push notification for bring due to a connection error, try again later" diff --git a/homeassistant/components/broadlink/const.py b/homeassistant/components/broadlink/const.py index 41c4964c2b3..c9b17128b79 100644 --- a/homeassistant/components/broadlink/const.py +++ b/homeassistant/components/broadlink/const.py @@ -6,7 +6,9 @@ DOMAIN = "broadlink" DOMAINS_AND_TYPES = { Platform.CLIMATE: {"HYS"}, + Platform.LIGHT: {"LB1", "LB2"}, Platform.REMOTE: {"RM4MINI", "RM4PRO", "RMMINI", "RMMINIB", "RMPRO"}, + Platform.SELECT: {"HYS"}, Platform.SENSOR: { "A1", "MP1S", @@ -35,7 +37,7 @@ DOMAINS_AND_TYPES = { "SP4", "SP4B", }, - Platform.LIGHT: {"LB1", "LB2"}, + Platform.TIME: {"HYS"}, } DEVICE_TYPES = set.union(*DOMAINS_AND_TYPES.values()) diff --git a/homeassistant/components/broadlink/select.py b/homeassistant/components/broadlink/select.py new file mode 100644 index 00000000000..6253adc308a --- /dev/null +++ b/homeassistant/components/broadlink/select.py @@ -0,0 +1,69 @@ +"""Support for Broadlink selects.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.select import SelectEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import BroadlinkDevice +from .const import DOMAIN +from .entity import BroadlinkEntity + +DAY_ID_TO_NAME = { + 1: "monday", + 2: "tuesday", + 3: "wednesday", + 4: "thursday", + 5: "friday", + 6: "saturday", + 7: "sunday", +} +DAY_NAME_TO_ID = {v: k for k, v in DAY_ID_TO_NAME.items()} + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Broadlink select.""" + device = hass.data[DOMAIN].devices[config_entry.entry_id] + async_add_entities([BroadlinkDayOfWeek(device)]) + + +class BroadlinkDayOfWeek(BroadlinkEntity, SelectEntity): + """Representation of a Broadlink day of week.""" + + _attr_has_entity_name = True + _attr_current_option: str | None = None + _attr_options = list(DAY_NAME_TO_ID) + _attr_translation_key = "day_of_week" + + def __init__(self, device: BroadlinkDevice) -> None: + """Initialize the select.""" + super().__init__(device) + + self._attr_unique_id = f"{device.unique_id}-dayofweek" + + def _update_state(self, data: dict[str, Any]) -> None: + """Update the state of the entity.""" + if data is None or "dayofweek" not in data: + self._attr_current_option = None + else: + self._attr_current_option = DAY_ID_TO_NAME[data["dayofweek"]] + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + await self._device.async_request( + self._device.api.set_time, + hour=self._coordinator.data["hour"], + minute=self._coordinator.data["min"], + second=self._coordinator.data["sec"], + day=DAY_NAME_TO_ID[option], + ) + self._attr_current_option = option + self.async_write_ha_state() diff --git a/homeassistant/components/broadlink/strings.json b/homeassistant/components/broadlink/strings.json index 335984d1ebe..5150a521363 100644 --- a/homeassistant/components/broadlink/strings.json +++ b/homeassistant/components/broadlink/strings.json @@ -61,6 +61,20 @@ "total_consumption": { "name": "Total consumption" } + }, + "select": { + "day_of_week": { + "name": "Day of week", + "state": { + "monday": "[%key:common::time::monday%]", + "tuesday": "[%key:common::time::tuesday%]", + "wednesday": "[%key:common::time::wednesday%]", + "thursday": "[%key:common::time::thursday%]", + "friday": "[%key:common::time::friday%]", + "saturday": "[%key:common::time::saturday%]", + "sunday": "[%key:common::time::sunday%]" + } + } } } } diff --git a/homeassistant/components/broadlink/time.py b/homeassistant/components/broadlink/time.py new file mode 100644 index 00000000000..3dcb045fead --- /dev/null +++ b/homeassistant/components/broadlink/time.py @@ -0,0 +1,63 @@ +"""Support for Broadlink device time.""" + +from __future__ import annotations + +from datetime import time +from typing import Any + +from homeassistant.components.time import TimeEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import dt as dt_util + +from . 
import BroadlinkDevice +from .const import DOMAIN +from .entity import BroadlinkEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Broadlink time.""" + device = hass.data[DOMAIN].devices[config_entry.entry_id] + async_add_entities([BroadlinkTime(device)]) + + +class BroadlinkTime(BroadlinkEntity, TimeEntity): + """Representation of a Broadlink device time.""" + + _attr_has_entity_name = True + _attr_native_value: time | None = None + + def __init__(self, device: BroadlinkDevice) -> None: + """Initialize the sensor.""" + super().__init__(device) + + self._attr_unique_id = f"{device.unique_id}-device_time" + + def _update_state(self, data: dict[str, Any]) -> None: + """Update the state of the entity.""" + if data is None or "hour" not in data or "min" not in data or "sec" not in data: + self._attr_native_value = None + else: + self._attr_native_value = time( + hour=data["hour"], + minute=data["min"], + second=data["sec"], + tzinfo=dt_util.get_default_time_zone(), + ) + + async def async_set_value(self, value: time) -> None: + """Change the value.""" + await self._device.async_request( + self._device.api.set_time, + hour=value.hour, + minute=value.minute, + second=value.second, + day=self._coordinator.data["dayofweek"], + ) + self._attr_native_value = value + self.async_write_ha_state() diff --git a/homeassistant/components/calendar/strings.json b/homeassistant/components/calendar/strings.json index 78b8407240c..83a7d01d8ae 100644 --- a/homeassistant/components/calendar/strings.json +++ b/homeassistant/components/calendar/strings.json @@ -111,12 +111,12 @@ }, "issues": { "deprecated_service_calendar_list_events": { - "title": "Detected use of deprecated service `calendar.list_events`", + "title": "Detected use of deprecated action `calendar.list_events`", "fix_flow": { "step": { "confirm": { "title": "[%key:component::calendar::issues::deprecated_service_calendar_list_events::title%]", - "description": "Use `calendar.get_events` instead which supports multiple entities.\n\nPlease replace this service and adjust your automations and scripts and select **submit** to close this issue." + "description": "Use `calendar.get_events` instead which supports multiple entities.\n\nPlease replace this action and adjust your automations and scripts and select **submit** to close this issue." 
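
The new Broadlink select and time entities above translate between the device's raw clock fields (`hour`, `min`, `sec`, and `dayofweek`, with days numbered 1–7 starting at Monday) and Home Assistant values, writing the full set of fields back through `set_time` whenever either the time or the day changes. A standalone sketch of that mapping follows; the sample payload is hypothetical.

```python
# Standalone sketch of the day-of-week and clock mapping used by the new
# Broadlink select/time entities above. The sample payload is illustrative.
from datetime import time

DAY_ID_TO_NAME = {
    1: "monday",
    2: "tuesday",
    3: "wednesday",
    4: "thursday",
    5: "friday",
    6: "saturday",
    7: "sunday",
}
DAY_NAME_TO_ID = {name: day_id for day_id, name in DAY_ID_TO_NAME.items()}

data = {"hour": 14, "min": 30, "sec": 5, "dayofweek": 3}  # hypothetical device payload

device_time = time(hour=data["hour"], minute=data["min"], second=data["sec"])
print(device_time, DAY_ID_TO_NAME[data["dayofweek"]])  # 14:30:05 wednesday

# Writing back: pick a new day by name and keep the current clock fields,
# mirroring what async_select_option does with the coordinator data.
new_day = DAY_NAME_TO_ID["friday"]
print({"hour": data["hour"], "min": data["min"], "sec": data["sec"], "dayofweek": new_day})
```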
} } } diff --git a/homeassistant/components/concord232/alarm_control_panel.py b/homeassistant/components/concord232/alarm_control_panel.py index 661a2beacc0..d3bafdeba4a 100644 --- a/homeassistant/components/concord232/alarm_control_panel.py +++ b/homeassistant/components/concord232/alarm_control_panel.py @@ -1,11 +1,12 @@ """Support for Concord232 alarm control panels.""" +# mypy: ignore-errors from __future__ import annotations import datetime import logging -from concord232 import client as concord232_client +# from concord232 import client as concord232_client import requests import voluptuous as vol diff --git a/homeassistant/components/concord232/binary_sensor.py b/homeassistant/components/concord232/binary_sensor.py index a1dcbc222f7..588e7681746 100644 --- a/homeassistant/components/concord232/binary_sensor.py +++ b/homeassistant/components/concord232/binary_sensor.py @@ -1,11 +1,12 @@ """Support for exposing Concord232 elements as sensors.""" +# mypy: ignore-errors from __future__ import annotations import datetime import logging -from concord232 import client as concord232_client +# from concord232 import client as concord232_client import requests import voluptuous as vol diff --git a/homeassistant/components/concord232/manifest.json b/homeassistant/components/concord232/manifest.json index 9c08c67a384..ef075ba5f96 100644 --- a/homeassistant/components/concord232/manifest.json +++ b/homeassistant/components/concord232/manifest.json @@ -2,6 +2,7 @@ "domain": "concord232", "name": "Concord232", "codeowners": [], + "disabled": "This integration is disabled because it uses non-open source code to operate.", "documentation": "https://www.home-assistant.io/integrations/concord232", "iot_class": "local_polling", "loggers": ["concord232", "stevedore"], diff --git a/homeassistant/components/concord232/ruff.toml b/homeassistant/components/concord232/ruff.toml new file mode 100644 index 00000000000..38f6f586aef --- /dev/null +++ b/homeassistant/components/concord232/ruff.toml @@ -0,0 +1,5 @@ +extend = "../../../pyproject.toml" + +lint.extend-ignore = [ + "F821" +] diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 58170b37c6b..f308ae57647 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==1.7.3", "home-assistant-intents==2024.7.3"] + "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.7.10"] } diff --git a/homeassistant/components/conversation/util.py b/homeassistant/components/conversation/util.py index b4ff2511ca1..4326c95cb66 100644 --- a/homeassistant/components/conversation/util.py +++ b/homeassistant/components/conversation/util.py @@ -34,4 +34,4 @@ def create_matcher(utterance: str) -> re.Pattern[str]: pattern.append(rf"(?:{optional_match.groups()[0]} *)?") pattern.append("$") - return re.compile("".join(pattern), re.I) + return re.compile("".join(pattern), re.IGNORECASE) diff --git a/homeassistant/components/doorbird/__init__.py b/homeassistant/components/doorbird/__init__.py index d232aa36cdb..8989e0ec0be 100644 --- a/homeassistant/components/doorbird/__init__.py +++ b/homeassistant/components/doorbird/__init__.py @@ -3,11 +3,9 @@ from __future__ import annotations from http import HTTPStatus -import logging -from typing import Any +from aiohttp 
import ClientResponseError from doorbirdpy import DoorBird -import requests from homeassistant.components import persistent_notification from homeassistant.const import ( @@ -18,7 +16,8 @@ from homeassistant.const import ( CONF_USERNAME, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType @@ -27,8 +26,6 @@ from .device import ConfiguredDoorBird from .models import DoorBirdConfigEntry, DoorBirdData from .view import DoorBirdRequestView -_LOGGER = logging.getLogger(__name__) - CONF_CUSTOM_URL = "hass_url_override" CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) @@ -48,36 +45,26 @@ async def async_setup_entry(hass: HomeAssistant, entry: DoorBirdConfigEntry) -> device_ip = door_station_config[CONF_HOST] username = door_station_config[CONF_USERNAME] password = door_station_config[CONF_PASSWORD] + session = async_get_clientsession(hass) - device = DoorBird(device_ip, username, password) + device = DoorBird(device_ip, username, password, http_session=session) try: - status, info = await hass.async_add_executor_job(_init_door_bird_device, device) - except requests.exceptions.HTTPError as err: - if err.response.status_code == HTTPStatus.UNAUTHORIZED: - _LOGGER.error( - "Authorization rejected by DoorBird for %s@%s", username, device_ip - ) - return False + info = await device.info() + except ClientResponseError as err: + if err.status == HTTPStatus.UNAUTHORIZED: + raise ConfigEntryAuthFailed from err raise ConfigEntryNotReady from err except OSError as oserr: - _LOGGER.error("Failed to setup doorbird at %s: %s", device_ip, oserr) raise ConfigEntryNotReady from oserr - if not status[0]: - _LOGGER.error( - "Could not connect to DoorBird as %s@%s: Error %s", - username, - device_ip, - str(status[1]), - ) - raise ConfigEntryNotReady - token: str = door_station_config.get(CONF_TOKEN, config_entry_id) custom_url: str | None = door_station_config.get(CONF_CUSTOM_URL) name: str | None = door_station_config.get(CONF_NAME) events = entry.options.get(CONF_EVENTS, []) event_entity_ids: dict[str, str] = {} - door_station = ConfiguredDoorBird(device, name, custom_url, token, event_entity_ids) + door_station = ConfiguredDoorBird( + hass, device, name, custom_url, token, event_entity_ids + ) door_bird_data = DoorBirdData(door_station, info, event_entity_ids) door_station.update_events(events) # Subscribe to doorbell or motion events @@ -91,11 +78,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: DoorBirdConfigEntry) -> return True -def _init_door_bird_device(device: DoorBird) -> tuple[tuple[bool, int], dict[str, Any]]: - """Verify we can connect to the device and return the status.""" - return device.ready(), device.info() - - async def async_unload_entry(hass: HomeAssistant, entry: DoorBirdConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @@ -106,8 +88,8 @@ async def _async_register_events( ) -> bool: """Register events on device.""" try: - await hass.async_add_executor_job(door_station.register_events, hass) - except requests.exceptions.HTTPError: + await door_station.async_register_events() + except ClientResponseError: persistent_notification.async_create( hass, ( diff --git 
a/homeassistant/components/doorbird/button.py b/homeassistant/components/doorbird/button.py index b83ff966174..62631e51abc 100644 --- a/homeassistant/components/doorbird/button.py +++ b/homeassistant/components/doorbird/button.py @@ -1,14 +1,15 @@ -"""Support for powering relays in a DoorBird video doorbell.""" +"""Support for relays and actions in a DoorBird video doorbell.""" -from collections.abc import Callable -from dataclasses import dataclass - -from doorbirdpy import DoorBird +from collections.abc import Callable, Coroutine +from dataclasses import dataclass, replace +from typing import Any from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from .device import ConfiguredDoorBird, async_reset_device_favorites from .entity import DoorBirdEntity from .models import DoorBirdConfigEntry, DoorBirdData @@ -19,18 +20,25 @@ IR_RELAY = "__ir_light__" class DoorbirdButtonEntityDescription(ButtonEntityDescription): """Class to describe a Doorbird Button entity.""" - press_action: Callable[[DoorBird, str], None] + press_action: Callable[[ConfiguredDoorBird, str], Coroutine[Any, Any, bool | None]] RELAY_ENTITY_DESCRIPTION = DoorbirdButtonEntityDescription( key="relay", - translation_key="relay", - press_action=lambda device, relay: device.energize_relay(relay), + press_action=lambda door_station, relay: door_station.device.energize_relay(relay), ) -IR_ENTITY_DESCRIPTION = DoorbirdButtonEntityDescription( - key="ir", - translation_key="ir", - press_action=lambda device, _: device.turn_light_on(), +BUTTON_DESCRIPTIONS: tuple[DoorbirdButtonEntityDescription, ...] = ( + DoorbirdButtonEntityDescription( + key="__ir_light__", + translation_key="ir", + press_action=lambda door_station, _: door_station.device.turn_light_on(), + ), + DoorbirdButtonEntityDescription( + key="reset_favorites", + translation_key="reset_favorites", + press_action=lambda door_station, _: async_reset_device_favorites(door_station), + entity_category=EntityCategory.CONFIG, + ), ) @@ -41,38 +49,39 @@ async def async_setup_entry( ) -> None: """Set up the DoorBird button platform.""" door_bird_data = config_entry.runtime_data - relays = door_bird_data.door_station_info["RELAYS"] - + relays: list[str] = door_bird_data.door_station_info["RELAYS"] entities = [ - DoorBirdButton(door_bird_data, relay, RELAY_ENTITY_DESCRIPTION) + DoorBirdButton( + door_bird_data, + replace(RELAY_ENTITY_DESCRIPTION, name=f"Relay {relay}"), + relay, + ) for relay in relays ] - entities.append(DoorBirdButton(door_bird_data, IR_RELAY, IR_ENTITY_DESCRIPTION)) - + entities.extend( + DoorBirdButton(door_bird_data, button_description) + for button_description in BUTTON_DESCRIPTIONS + ) async_add_entities(entities) class DoorBirdButton(DoorBirdEntity, ButtonEntity): - """A relay in a DoorBird device.""" + """A button for a DoorBird device.""" entity_description: DoorbirdButtonEntityDescription def __init__( self, door_bird_data: DoorBirdData, - relay: str, entity_description: DoorbirdButtonEntityDescription, + relay: str | None = None, ) -> None: - """Initialize a relay in a DoorBird device.""" + """Initialize a button for a DoorBird device.""" super().__init__(door_bird_data) - self._relay = relay + self._relay = relay or "" self.entity_description = entity_description - if self._relay == IR_RELAY: - self._attr_name = "IR" - else: - self._attr_name = f"Relay 
{self._relay}" - self._attr_unique_id = f"{self._mac_addr}_{self._relay}" + self._attr_unique_id = f"{self._mac_addr}_{relay or entity_description.key}" - def press(self) -> None: - """Power the relay.""" - self.entity_description.press_action(self._door_station.device, self._relay) + async def async_press(self) -> None: + """Call the press action.""" + await self.entity_description.press_action(self._door_station, self._relay) diff --git a/homeassistant/components/doorbird/camera.py b/homeassistant/components/doorbird/camera.py index 8ab7f748f4a..640d6630c18 100644 --- a/homeassistant/components/doorbird/camera.py +++ b/homeassistant/components/doorbird/camera.py @@ -2,7 +2,6 @@ from __future__ import annotations -import asyncio import datetime import logging @@ -10,7 +9,6 @@ import aiohttp from homeassistant.components.camera import Camera, CameraEntityFeature from homeassistant.core import HomeAssistant -from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util @@ -95,11 +93,9 @@ class DoorBirdCamera(DoorBirdEntity, Camera): return self._last_image try: - websession = async_get_clientsession(self.hass) - async with asyncio.timeout(_TIMEOUT): - response = await websession.get(self._url) - - self._last_image = await response.read() + self._last_image = await self._door_station.device.get_image( + self._url, timeout=_TIMEOUT + ) except TimeoutError: _LOGGER.error("DoorBird %s: Camera image timed out", self.name) return self._last_image diff --git a/homeassistant/components/doorbird/config_flow.py b/homeassistant/components/doorbird/config_flow.py index b59c03ac565..31204a6663b 100644 --- a/homeassistant/components/doorbird/config_flow.py +++ b/homeassistant/components/doorbird/config_flow.py @@ -2,12 +2,13 @@ from __future__ import annotations +from collections.abc import Mapping from http import HTTPStatus import logging from typing import Any +from aiohttp import ClientResponseError from doorbirdpy import DoorBird -import requests import voluptuous as vol from homeassistant.components import zeroconf @@ -20,12 +21,29 @@ from homeassistant.config_entries import ( from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.typing import VolDictType -from .const import CONF_EVENTS, DOMAIN, DOORBIRD_OUI +from .const import ( + CONF_EVENTS, + DEFAULT_DOORBELL_EVENT, + DEFAULT_MOTION_EVENT, + DOMAIN, + DOORBIRD_OUI, +) from .util import get_mac_address_from_door_station_info _LOGGER = logging.getLogger(__name__) +DEFAULT_OPTIONS = {CONF_EVENTS: [DEFAULT_DOORBELL_EVENT, DEFAULT_MOTION_EVENT]} + + +AUTH_VOL_DICT: VolDictType = { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, +} +AUTH_SCHEMA = vol.Schema(AUTH_VOL_DICT) + def _schema_with_defaults( host: str | None = None, name: str | None = None @@ -33,33 +51,27 @@ def _schema_with_defaults( return vol.Schema( { vol.Required(CONF_HOST, default=host): str, - vol.Required(CONF_USERNAME): str, - vol.Required(CONF_PASSWORD): str, + **AUTH_VOL_DICT, vol.Optional(CONF_NAME, default=name): str, } ) -def _check_device(device: DoorBird) -> tuple[tuple[bool, int], dict[str, Any]]: - """Verify we can connect to the device and return the status.""" - return device.ready(), 
device.info() - - async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]: """Validate the user input allows us to connect.""" - device = DoorBird(data[CONF_HOST], data[CONF_USERNAME], data[CONF_PASSWORD]) + session = async_get_clientsession(hass) + device = DoorBird( + data[CONF_HOST], data[CONF_USERNAME], data[CONF_PASSWORD], http_session=session + ) try: - status, info = await hass.async_add_executor_job(_check_device, device) - except requests.exceptions.HTTPError as err: - if err.response.status_code == HTTPStatus.UNAUTHORIZED: + info = await device.info() + except ClientResponseError as err: + if err.status == HTTPStatus.UNAUTHORIZED: raise InvalidAuth from err raise CannotConnect from err except OSError as err: raise CannotConnect from err - if not status[0]: - raise CannotConnect - mac_addr = get_mac_address_from_door_station_info(info) # Return info that you want to store in the config entry. @@ -68,11 +80,12 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, async def async_verify_supported_device(hass: HomeAssistant, host: str) -> bool: """Verify the doorbell state endpoint returns a 401.""" - device = DoorBird(host, "", "") + session = async_get_clientsession(hass) + device = DoorBird(host, "", "", http_session=session) try: - await hass.async_add_executor_job(device.doorbell_state) - except requests.exceptions.HTTPError as err: - if err.response.status_code == HTTPStatus.UNAUTHORIZED: + await device.doorbell_state() + except ClientResponseError as err: + if err.status == HTTPStatus.UNAUTHORIZED: return True except OSError: return False @@ -87,6 +100,47 @@ class DoorBirdConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the DoorBird config flow.""" self.discovery_schema: vol.Schema | None = None + self.reauth_entry: ConfigEntry | None = None + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle reauth.""" + entry_id = self.context["entry_id"] + self.reauth_entry = self.hass.config_entries.async_get_entry(entry_id) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reauth input.""" + errors: dict[str, str] = {} + existing_entry = self.reauth_entry + assert existing_entry + existing_data = existing_entry.data + placeholders: dict[str, str] = { + CONF_NAME: existing_data[CONF_NAME], + CONF_HOST: existing_data[CONF_HOST], + } + self.context["title_placeholders"] = placeholders + if user_input is not None: + new_config = { + **existing_data, + CONF_USERNAME: user_input[CONF_USERNAME], + CONF_PASSWORD: user_input[CONF_PASSWORD], + } + _, errors = await self._async_validate_or_error(new_config) + if not errors: + return self.async_update_reload_and_abort( + existing_entry, data=new_config + ) + + return self.async_show_form( + description_placeholders=placeholders, + step_id="reauth_confirm", + data_schema=AUTH_SCHEMA, + errors=errors, + ) async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -98,7 +152,9 @@ class DoorBirdConfigFlow(ConfigFlow, domain=DOMAIN): if not errors: await self.async_set_unique_id(info["mac_addr"]) self._abort_if_unique_id_configured() - return self.async_create_entry(title=info["title"], data=user_input) + return self.async_create_entry( + title=info["title"], data=user_input, options=DEFAULT_OPTIONS + ) data = self.discovery_schema or _schema_with_defaults() return 
self.async_show_form(step_id="user", data_schema=data, errors=errors) @@ -175,7 +231,6 @@ class OptionsFlowHandler(OptionsFlow): """Handle options flow.""" if user_input is not None: events = [event.strip() for event in user_input[CONF_EVENTS].split(",")] - return self.async_create_entry(title="", data={CONF_EVENTS: events}) current_events = self.config_entry.options.get(CONF_EVENTS, []) diff --git a/homeassistant/components/doorbird/const.py b/homeassistant/components/doorbird/const.py index 4985b9ac9ea..b4b9d6f3223 100644 --- a/homeassistant/components/doorbird/const.py +++ b/homeassistant/components/doorbird/const.py @@ -4,9 +4,6 @@ from homeassistant.const import Platform DOMAIN = "doorbird" PLATFORMS = [Platform.BUTTON, Platform.CAMERA, Platform.EVENT] -DOOR_STATION = "door_station" -DOOR_STATION_INFO = "door_station_info" -DOOR_STATION_EVENT_ENTITY_IDS = "door_station_event_entity_ids" CONF_EVENTS = "events" MANUFACTURER = "Bird Home Automation Group" @@ -22,3 +19,16 @@ DOORBIRD_INFO_KEY_WIFI_MAC_ADDR = "WIFI_MAC_ADDR" UNDO_UPDATE_LISTENER = "undo_update_listener" API_URL = f"/api/{DOMAIN}" + + +DEFAULT_DOORBELL_EVENT = "doorbell" +DEFAULT_MOTION_EVENT = "motion" + +DEFAULT_EVENT_TYPES = ( + (DEFAULT_DOORBELL_EVENT, "doorbell"), + (DEFAULT_MOTION_EVENT, "motion"), +) + +HTTP_EVENT_TYPE = "http" +MIN_WEEKDAY = 104400 +MAX_WEEKDAY = 104399 diff --git a/homeassistant/components/doorbird/device.py b/homeassistant/components/doorbird/device.py index f1ede43bbd4..9bb3397d0ff 100644 --- a/homeassistant/components/doorbird/device.py +++ b/homeassistant/components/doorbird/device.py @@ -2,19 +2,31 @@ from __future__ import annotations +from collections import defaultdict from dataclasses import dataclass from functools import cached_property import logging from typing import Any -from doorbirdpy import DoorBird, DoorBirdScheduleEntry +from doorbirdpy import ( + DoorBird, + DoorBirdScheduleEntry, + DoorBirdScheduleEntryOutput, + DoorBirdScheduleEntrySchedule, +) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.helpers.network import get_url from homeassistant.util import dt as dt_util, slugify -from .const import API_URL +from .const import ( + API_URL, + DEFAULT_EVENT_TYPES, + HTTP_EVENT_TYPE, + MAX_WEEKDAY, + MIN_WEEKDAY, +) _LOGGER = logging.getLogger(__name__) @@ -27,11 +39,21 @@ class DoorbirdEvent: event_type: str +@dataclass(slots=True) +class DoorbirdEventConfig: + """Describes the configuration of doorbird events.""" + + events: list[DoorbirdEvent] + schedule: list[DoorBirdScheduleEntry] + unconfigured_favorites: defaultdict[str, list[str]] + + class ConfiguredDoorBird: """Attach additional information to pass along with configured device.""" def __init__( self, + hass: HomeAssistant, device: DoorBird, name: str | None, custom_url: str | None, @@ -39,12 +61,15 @@ class ConfiguredDoorBird: event_entity_ids: dict[str, str], ) -> None: """Initialize configured device.""" + self._hass = hass self._name = name self._device = device self._custom_url = custom_url self._token = token self._event_entity_ids = event_entity_ids + # Raw events, ie "doorbell" or "motion" self.events: list[str] = [] + # Event names, ie "doorbird_1234_doorbell" or "doorbird_1234_motion" self.door_station_events: list[str] = [] self.event_descriptions: list[DoorbirdEvent] = [] @@ -75,35 +100,90 @@ class ConfiguredDoorBird: """Get token for device.""" return self._token - def register_events(self, hass: HomeAssistant) -> None: + async def 
async_register_events(self) -> None: + """Register events on device.""" + if not self.door_station_events: + # User may not have permission to get the favorites + return + + http_fav = await self._async_register_events() + event_config = await self._async_get_event_config(http_fav) + _LOGGER.debug("%s: Event config: %s", self.name, event_config) + if event_config.unconfigured_favorites: + await self._configure_unconfigured_favorites(event_config) + event_config = await self._async_get_event_config(http_fav) + self.event_descriptions = event_config.events + + async def _configure_unconfigured_favorites( + self, event_config: DoorbirdEventConfig + ) -> None: + """Configure unconfigured favorites.""" + for entry in event_config.schedule: + modified_schedule = False + for identifier in event_config.unconfigured_favorites.get(entry.input, ()): + schedule = DoorBirdScheduleEntrySchedule() + schedule.add_weekday(MIN_WEEKDAY, MAX_WEEKDAY) + entry.output.append( + DoorBirdScheduleEntryOutput( + enabled=True, + event=HTTP_EVENT_TYPE, + param=identifier, + schedule=schedule, + ) + ) + modified_schedule = True + + if modified_schedule: + update_ok, code = await self.device.change_schedule(entry) + if not update_ok: + _LOGGER.error( + "Unable to update schedule entry %s to %s. Error code: %s", + self.name, + entry.export, + code, + ) + + async def _async_register_events(self) -> dict[str, Any]: """Register events on device.""" # Override url if another is specified in the configuration if custom_url := self.custom_url: hass_url = custom_url else: # Get the URL of this server - hass_url = get_url(hass, prefer_external=False) + hass_url = get_url(self._hass, prefer_external=False) - if not self.door_station_events: - # User may not have permission to get the favorites - return + http_fav = await self._async_get_http_favorites() + if any( + # Note that a list comp is used here to ensure all + # events are registered and the any does not short circuit + [ + await self._async_register_event(hass_url, event, http_fav) + for event in self.door_station_events + ] + ): + # If any events were registered, get the updated favorites + http_fav = await self._async_get_http_favorites() - favorites = self.device.favorites() - for event in self.door_station_events: - if self._register_event(hass_url, event, favs=favorites): - _LOGGER.info( - "Successfully registered URL for %s on %s", event, self.name - ) + return http_fav - schedule: list[DoorBirdScheduleEntry] = self.device.schedule() - http_fav: dict[str, dict[str, Any]] = favorites.get("http") or {} - favorite_input_type: dict[str, str] = { + async def _async_get_event_config( + self, http_fav: dict[str, dict[str, Any]] + ) -> DoorbirdEventConfig: + """Get events and unconfigured favorites from http favorites.""" + device = self.device + schedule = await device.schedule() + favorite_input_type = { output.param: entry.input for entry in schedule for output in entry.output - if output.event == "http" + if output.event == HTTP_EVENT_TYPE } events: list[DoorbirdEvent] = [] + unconfigured_favorites: defaultdict[str, list[str]] = defaultdict(list) + default_event_types = { + self._get_event_name(event): event_type + for event, event_type in DEFAULT_EVENT_TYPES + } for identifier, data in http_fav.items(): title: str | None = data.get("title") if not title or not title.startswith("Home Assistant"): @@ -111,8 +191,10 @@ class ConfiguredDoorBird: event = title.split("(")[1].strip(")") if input_type := favorite_input_type.get(identifier): events.append(DoorbirdEvent(event, 
input_type)) + elif input_type := default_event_types.get(event): + unconfigured_favorites[input_type].append(identifier) - self.event_descriptions = events + return DoorbirdEventConfig(events, schedule, unconfigured_favorites) @cached_property def slug(self) -> str: @@ -122,46 +204,38 @@ def _get_event_name(self, event: str) -> str: return f"{self.slug}_{event}" - def _register_event( - self, hass_url: str, event: str, favs: dict[str, Any] | None = None + async def _async_get_http_favorites(self) -> dict[str, dict[str, Any]]: + """Get the HTTP favorites from the device.""" + return (await self.device.favorites()).get(HTTP_EVENT_TYPE) or {} + + async def _async_register_event( + self, hass_url: str, event: str, http_fav: dict[str, dict[str, Any]] ) -> bool: - """Add a schedule entry in the device for a sensor.""" + """Register an event. + + Returns True if the event was registered, False if + the event was already registered or registration failed. + """ url = f"{hass_url}{API_URL}/{event}?token={self._token}" + _LOGGER.debug("Registering URL %s for event %s", url, event) + # If it's already registered, don't register it again + if any(fav["value"] == url for fav in http_fav.values()): + _LOGGER.debug("URL already registered for %s", event) + return False - # Register HA URL as webhook if not already, then get the ID - if self.webhook_is_registered(url, favs=favs): - return True - - self.device.change_favorite("http", f"Home Assistant ({event})", url) - if not self.webhook_is_registered(url): + if not await self.device.change_favorite( + HTTP_EVENT_TYPE, f"Home Assistant ({event})", url + ): _LOGGER.warning( 'Unable to set favorite URL "%s". Event "%s" will not fire', url, event, ) return False + + _LOGGER.info("Successfully registered URL for %s on %s", event, self.name) return True - def webhook_is_registered( - self, url: str, favs: dict[str, Any] | None = None - ) -> bool: - """Return whether the given URL is registered as a device favorite.""" - return self.get_webhook_id(url, favs) is not None - - def get_webhook_id( - self, url: str, favs: dict[str, Any] | None = None - ) -> str | None: - """Return the device favorite ID for the given URL. - - The favorite must exist or there will be problems. 
- """ - favs = favs if favs else self.device.favorites() - http_fav: dict[str, dict[str, Any]] = favs.get("http") or {} - for fav_id, data in http_fav.items(): - if data["value"] == url: - return fav_id - return None - def get_event_data(self, event: str) -> dict[str, str | None]: """Get data to pass along with HA event.""" return { @@ -174,18 +248,11 @@ class ConfiguredDoorBird: } -async def async_reset_device_favorites( - hass: HomeAssistant, door_station: ConfiguredDoorBird -) -> None: +async def async_reset_device_favorites(door_station: ConfiguredDoorBird) -> None: """Handle clearing favorites on device.""" - await hass.async_add_executor_job(_reset_device_favorites, door_station) - - -def _reset_device_favorites(door_station: ConfiguredDoorBird) -> None: - """Handle clearing favorites on device.""" - # Clear webhooks door_bird = door_station.device - favorites: dict[str, list[str]] = door_bird.favorites() + favorites = await door_bird.favorites() for favorite_type, favorite_ids in favorites.items(): for favorite_id in favorite_ids: - door_bird.delete_favorite(favorite_type, favorite_id) + await door_bird.delete_favorite(favorite_type, favorite_id) + await door_station.async_register_events() diff --git a/homeassistant/components/doorbird/event.py b/homeassistant/components/doorbird/event.py index 39da279a3e0..4c20098fc80 100644 --- a/homeassistant/components/doorbird/event.py +++ b/homeassistant/components/doorbird/event.py @@ -7,7 +7,8 @@ from homeassistant.components.event import ( EventEntity, EventEntityDescription, ) -from homeassistant.core import Event, HomeAssistant, callback +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN @@ -70,14 +71,15 @@ class DoorBirdEventEntity(DoorBirdEntity, EventEntity): async def async_added_to_hass(self) -> None: """Subscribe to device events.""" self.async_on_remove( - self.hass.bus.async_listen( + async_dispatcher_connect( + self.hass, f"{DOMAIN}_{self._doorbird_event.event}", self._async_handle_event, ) ) @callback - def _async_handle_event(self, event: Event) -> None: + def _async_handle_event(self) -> None: """Handle a device event.""" event_types = self.entity_description.event_types if TYPE_CHECKING: diff --git a/homeassistant/components/doorbird/manifest.json b/homeassistant/components/doorbird/manifest.json index 2bb981ab06f..e77f9aaf0a4 100644 --- a/homeassistant/components/doorbird/manifest.json +++ b/homeassistant/components/doorbird/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/doorbird", "iot_class": "local_push", "loggers": ["doorbirdpy"], - "requirements": ["DoorBirdPy==2.1.0"], + "requirements": ["DoorBirdPy==3.0.2"], "zeroconf": [ { "type": "_axis-video._tcp.local.", diff --git a/homeassistant/components/doorbird/strings.json b/homeassistant/components/doorbird/strings.json index 7bb55739fcf..29c85ec7311 100644 --- a/homeassistant/components/doorbird/strings.json +++ b/homeassistant/components/doorbird/strings.json @@ -23,12 +23,20 @@ "data_description": { "host": "The hostname or IP address of your DoorBird device." 
} + }, + "reauth_confirm": { + "description": "Re-authenticate DoorBird device {name} at {host}", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } } }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "link_local_address": "Link local addresses are not supported", - "not_doorbird_device": "This device is not a DoorBird" + "not_doorbird_device": "This device is not a DoorBird", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "flow_title": "{name} ({host})", "error": { @@ -38,6 +46,14 @@ } }, "entity": { + "button": { + "reset_favorites": { + "name": "Reset favorites" + }, + "ir": { + "name": "IR" + } + }, "camera": { "live": { "name": "live" diff --git a/homeassistant/components/doorbird/view.py b/homeassistant/components/doorbird/view.py index e1fa8e7cfbb..77b84bf4f3b 100644 --- a/homeassistant/components/doorbird/view.py +++ b/homeassistant/components/doorbird/view.py @@ -7,9 +7,9 @@ from http import HTTPStatus from aiohttp import web from homeassistant.components.http import KEY_HASS, HomeAssistantView +from homeassistant.helpers.dispatcher import async_dispatcher_send from .const import API_URL, DOMAIN -from .device import async_reset_device_favorites from .util import get_door_station_by_token @@ -38,11 +38,6 @@ class DoorBirdRequestView(HomeAssistantView): else: event_data = {} - if event == "clear": - await async_reset_device_favorites(hass, door_station) - message = f"HTTP Favorites cleared for {door_station.slug}" - return web.Response(text=message) - # # This integration uses a multiple different events. # It would be a major breaking change to change this to @@ -51,5 +46,7 @@ class DoorBirdRequestView(HomeAssistantView): # Do not copy this pattern in the future # for any new integrations. 
# - hass.bus.async_fire(f"{DOMAIN}_{event}", event_data) + event_type = f"{DOMAIN}_{event}" + hass.bus.async_fire(event_type, event_data) + async_dispatcher_send(hass, event_type) return web.Response(text="OK") diff --git a/homeassistant/components/dovado/__init__.py b/homeassistant/components/dovado/__init__.py index e89fd4361a5..5f63bbd0b2b 100644 --- a/homeassistant/components/dovado/__init__.py +++ b/homeassistant/components/dovado/__init__.py @@ -1,9 +1,10 @@ """Support for Dovado router.""" +# mypy: ignore-errors from datetime import timedelta import logging -import dovado +# import dovado import voluptuous as vol from homeassistant.const import ( diff --git a/homeassistant/components/dovado/manifest.json b/homeassistant/components/dovado/manifest.json index 620830e7757..9a0fc46ad16 100644 --- a/homeassistant/components/dovado/manifest.json +++ b/homeassistant/components/dovado/manifest.json @@ -2,6 +2,7 @@ "domain": "dovado", "name": "Dovado", "codeowners": [], + "disabled": "This integration is disabled because it uses non-open source code to operate.", "documentation": "https://www.home-assistant.io/integrations/dovado", "iot_class": "local_polling", "requirements": ["dovado==0.4.1"] diff --git a/homeassistant/components/dovado/ruff.toml b/homeassistant/components/dovado/ruff.toml new file mode 100644 index 00000000000..38f6f586aef --- /dev/null +++ b/homeassistant/components/dovado/ruff.toml @@ -0,0 +1,5 @@ +extend = "../../../pyproject.toml" + +lint.extend-ignore = [ + "F821" +] diff --git a/homeassistant/components/dsmr/sensor.py b/homeassistant/components/dsmr/sensor.py index 7b2e916529a..d46b2777a34 100644 --- a/homeassistant/components/dsmr/sensor.py +++ b/homeassistant/components/dsmr/sensor.py @@ -16,7 +16,7 @@ from dsmr_parser.clients.rfxtrx_protocol import ( create_rfxtrx_dsmr_reader, create_rfxtrx_tcp_dsmr_reader, ) -from dsmr_parser.objects import DSMRObject +from dsmr_parser.objects import DSMRObject, Telegram import serial from homeassistant.components.sensor import ( @@ -380,7 +380,7 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] 
= ( def create_mbus_entity( - mbus: int, mtype: int, telegram: dict[str, DSMRObject] + mbus: int, mtype: int, telegram: Telegram ) -> DSMRSensorEntityDescription | None: """Create a new MBUS Entity.""" if ( @@ -478,7 +478,7 @@ def rename_old_gas_to_mbus( def create_mbus_entities( - hass: HomeAssistant, telegram: dict[str, DSMRObject], entry: ConfigEntry + hass: HomeAssistant, telegram: Telegram, entry: ConfigEntry ) -> list[DSMREntity]: """Create MBUS Entities.""" entities = [] @@ -523,7 +523,7 @@ async def async_setup_entry( add_entities_handler: Callable[..., None] | None @callback - def init_async_add_entities(telegram: dict[str, DSMRObject]) -> None: + def init_async_add_entities(telegram: Telegram) -> None: """Add the sensor entities after the first telegram was received.""" nonlocal add_entities_handler assert add_entities_handler is not None @@ -560,7 +560,7 @@ async def async_setup_entry( ) @Throttle(min_time_between_updates) - def update_entities_telegram(telegram: dict[str, DSMRObject] | None) -> None: + def update_entities_telegram(telegram: Telegram | None) -> None: """Update entities with latest telegram and trigger state update.""" nonlocal initialized # Make all device entities aware of new telegram @@ -709,7 +709,7 @@ class DSMREntity(SensorEntity): self, entity_description: DSMRSensorEntityDescription, entry: ConfigEntry, - telegram: dict[str, DSMRObject], + telegram: Telegram, device_class: SensorDeviceClass, native_unit_of_measurement: str | None, serial_id: str = "", @@ -720,7 +720,7 @@ class DSMREntity(SensorEntity): self._attr_device_class = device_class self._attr_native_unit_of_measurement = native_unit_of_measurement self._entry = entry - self.telegram: dict[str, DSMRObject] | None = telegram + self.telegram: Telegram | None = telegram device_serial = entry.data[CONF_SERIAL_ID] device_name = DEVICE_NAME_ELECTRICITY @@ -750,7 +750,7 @@ class DSMREntity(SensorEntity): self._attr_unique_id = f"{device_serial}_{entity_description.key}" @callback - def update_data(self, telegram: dict[str, DSMRObject] | None) -> None: + def update_data(self, telegram: Telegram | None) -> None: """Update data.""" self.telegram = telegram if self.hass and ( diff --git a/homeassistant/components/ecobee/strings.json b/homeassistant/components/ecobee/strings.json index 56cf6e9ebf0..5483ca2299d 100644 --- a/homeassistant/components/ecobee/strings.json +++ b/homeassistant/components/ecobee/strings.json @@ -171,12 +171,12 @@ }, "issues": { "migrate_aux_heat": { - "title": "Migration of Ecobee set_aux_heat service", + "title": "Migration of Ecobee set_aux_heat action", "fix_flow": { "step": { "confirm": { - "description": "The Ecobee `set_aux_heat` service has been migrated. A new `aux_heat_only` switch entity is available for each thermostat that supports a Heat Pump.\n\nUpdate any automations to use the new `aux_heat_only` switch entity. When this is done, fix this issue and restart Home Assistant.", - "title": "Disable legacy Ecobee set_aux_heat service" + "description": "The Ecobee `set_aux_heat` action has been migrated. A new `aux_heat_only` switch entity is available for each thermostat that supports a Heat Pump.\n\nUpdate any automations to use the new `aux_heat_only` switch entity. 
When this is done, fix this issue and restart Home Assistant.", + "title": "Disable legacy Ecobee set_aux_heat action" } } } diff --git a/homeassistant/components/energyzero/strings.json b/homeassistant/components/energyzero/strings.json index 9858838aff7..7788f4d4d8e 100644 --- a/homeassistant/components/energyzero/strings.json +++ b/homeassistant/components/energyzero/strings.json @@ -58,7 +58,7 @@ "fields": { "config_entry": { "name": "Config Entry", - "description": "The config entry to use for this service." + "description": "The config entry to use for this action." }, "incl_vat": { "name": "Including VAT", diff --git a/homeassistant/components/enigma2/__init__.py b/homeassistant/components/enigma2/__init__.py index 4e4f8bdb687..de8283a5533 100644 --- a/homeassistant/components/enigma2/__init__.py +++ b/homeassistant/components/enigma2/__init__.py @@ -16,6 +16,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_create_clientsession +from .const import CONF_SOURCE_BOUQUET + type Enigma2ConfigEntry = ConfigEntry[OpenWebIfDevice] PLATFORMS = [Platform.MEDIA_PLAYER] @@ -35,7 +37,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: Enigma2ConfigEntry) -> b hass, verify_ssl=entry.data[CONF_VERIFY_SSL], base_url=base_url ) - entry.runtime_data = OpenWebIfDevice(session) + entry.runtime_data = OpenWebIfDevice( + session, source_bouquet=entry.options.get(CONF_SOURCE_BOUQUET) + ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/enigma2/manifest.json b/homeassistant/components/enigma2/manifest.json index ef08314e541..538cfb56388 100644 --- a/homeassistant/components/enigma2/manifest.json +++ b/homeassistant/components/enigma2/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["openwebif"], - "requirements": ["openwebifpy==4.2.4"] + "requirements": ["openwebifpy==4.2.5"] } diff --git a/homeassistant/components/enigma2/media_player.py b/homeassistant/components/enigma2/media_player.py index 63acdd8be72..86ed9652106 100644 --- a/homeassistant/components/enigma2/media_player.py +++ b/homeassistant/components/enigma2/media_player.py @@ -199,7 +199,8 @@ class Enigma2Device(MediaPlayerEntity): async def async_mute_volume(self, mute: bool) -> None: """Mute or unmute.""" - await self._device.toggle_mute() + if mute != self._device.status.muted: + await self._device.toggle_mute() async def async_select_source(self, source: str) -> None: """Select input source.""" diff --git a/homeassistant/components/esphome/entity.py b/homeassistant/components/esphome/entity.py index 8241d0f4563..6e02f8de869 100644 --- a/homeassistant/components/esphome/entity.py +++ b/homeassistant/components/esphome/entity.py @@ -190,13 +190,13 @@ class EsphomeEntity(Entity, Generic[_InfoT, _StateT]): ) -> None: """Initialize.""" self._entry_data = entry_data + assert entry_data.device_info is not None + device_info = entry_data.device_info + self._device_info = device_info self._on_entry_data_changed() self._key = entity_info.key self._state_type = state_type self._on_static_info_update(entity_info) - assert entry_data.device_info is not None - device_info = entry_data.device_info - self._device_info = device_info self._attr_device_info = DeviceInfo( connections={(dr.CONNECTION_NETWORK_MAC, device_info.mac_address)} ) @@ -288,6 +288,12 @@ class EsphomeEntity(Entity, Generic[_InfoT, _StateT]): entry_data = 
self._entry_data self._api_version = entry_data.api_version self._client = entry_data.client + if self._device_info.has_deep_sleep: + # During deep sleep the ESP will not be connectable (by design) + # For these cases, show it as available + self._attr_available = entry_data.expected_disconnect + else: + self._attr_available = entry_data.available @callback def _on_device_update(self) -> None: @@ -300,16 +306,6 @@ class EsphomeEntity(Entity, Generic[_InfoT, _StateT]): # through the next entity state packet. self.async_write_ha_state() - @property - def available(self) -> bool: - """Return if the entity is available.""" - if self._device_info.has_deep_sleep: - # During deep sleep the ESP will not be connectable (by design) - # For these cases, show it as available - return self._entry_data.expected_disconnect - - return self._entry_data.available - class EsphomeAssistEntity(Entity): """Define a base entity for Assist Pipeline entities.""" diff --git a/homeassistant/components/esphome/light.py b/homeassistant/components/esphome/light.py index 295f9365cd0..52f999afe4f 100644 --- a/homeassistant/components/esphome/light.py +++ b/homeassistant/components/esphome/light.py @@ -122,7 +122,7 @@ def _color_mode_to_ha(mode: int) -> str: return ColorMode.UNKNOWN # choose the color mode with the most bits set - candidates.sort(key=lambda key: bin(key[1]).count("1")) + candidates.sort(key=lambda key: key[1].bit_count()) return candidates[-1][0] @@ -146,7 +146,7 @@ def _least_complex_color_mode(color_modes: tuple[int, ...]) -> int: # popcount with bin() function because it appears # to be the best way: https://stackoverflow.com/a/9831671 color_modes_list = list(color_modes) - color_modes_list.sort(key=lambda mode: bin(mode).count("1")) + color_modes_list.sort(key=lambda mode: (mode).bit_count()) return color_modes_list[0] diff --git a/homeassistant/components/esphome/sensor.py b/homeassistant/components/esphome/sensor.py index 4c99463505f..0742bebed28 100644 --- a/homeassistant/components/esphome/sensor.py +++ b/homeassistant/components/esphome/sensor.py @@ -99,7 +99,7 @@ class EsphomeSensor(EsphomeEntity[SensorInfo, SensorState], SensorEntity): state = self._state if state.missing_state or not math.isfinite(state.state): return None - if self._attr_device_class == SensorDeviceClass.TIMESTAMP: + if self._attr_device_class is SensorDeviceClass.TIMESTAMP: return dt_util.utc_from_timestamp(state.state) return f"{state.state:.{self._static_info.accuracy_decimals}f}" diff --git a/homeassistant/components/esphome/strings.json b/homeassistant/components/esphome/strings.json index 205b0b10744..eb2e8f65b78 100644 --- a/homeassistant/components/esphome/strings.json +++ b/homeassistant/components/esphome/strings.json @@ -5,7 +5,7 @@ "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "mdns_missing_mac": "Missing MAC address in MDNS properties.", - "service_received": "Service received", + "service_received": "Action received", "mqtt_missing_mac": "Missing MAC address in MQTT properties.", "mqtt_missing_api": "Missing API port in MQTT properties.", "mqtt_missing_ip": "Missing IP address in MQTT properties." @@ -53,7 +53,7 @@ "step": { "init": { "data": { - "allow_service_calls": "Allow the device to make Home Assistant service calls." + "allow_service_calls": "Allow the device to perform Home Assistant actions." 
} } } @@ -102,8 +102,8 @@ "description": "The API password for ESPHome is deprecated and the use of an API encryption key is recommended instead.\n\nRemove the API password and add an encryption key to your ESPHome device to resolve this issue." }, "service_calls_not_allowed": { - "title": "{name} is not permitted to call Home Assistant services", - "description": "The ESPHome device attempted to make a Home Assistant service call, but this functionality is not enabled.\n\nIf you trust this device and want to allow it to make Home Assistant service calls, you can enable this functionality in the options flow." + "title": "{name} is not permitted to perform Home Assistant actions", + "description": "The ESPHome device attempted to perform a Home Assistant action, but this functionality is not enabled.\n\nIf you trust this device and want to allow it to perform Home Assistant actions, you can enable this functionality in the options flow." } } } diff --git a/homeassistant/components/filter/manifest.json b/homeassistant/components/filter/manifest.json index 950d3f3e665..4d9a8992036 100644 --- a/homeassistant/components/filter/manifest.json +++ b/homeassistant/components/filter/manifest.json @@ -4,6 +4,7 @@ "codeowners": ["@dgomes"], "dependencies": ["recorder"], "documentation": "https://www.home-assistant.io/integrations/filter", + "integration_type": "helper", "iot_class": "local_push", "quality_scale": "internal" } diff --git a/homeassistant/components/foscam/strings.json b/homeassistant/components/foscam/strings.json index de22006b274..285f0f5a780 100644 --- a/homeassistant/components/foscam/strings.json +++ b/homeassistant/components/foscam/strings.json @@ -28,7 +28,7 @@ "services": { "ptz": { "name": "PTZ", - "description": "Pan/Tilt service for Foscam camera.", + "description": "Pan/Tilt action for Foscam camera.", "fields": { "movement": { "name": "Movement", @@ -42,7 +42,7 @@ }, "ptz_preset": { "name": "PTZ preset", - "description": "PTZ Preset service for Foscam camera.", + "description": "PTZ Preset action for Foscam camera.", "fields": { "preset_name": { "name": "Preset name", diff --git a/homeassistant/components/fritz/strings.json b/homeassistant/components/fritz/strings.json index eb47f76f27e..3b6c60ed48f 100644 --- a/homeassistant/components/fritz/strings.json +++ b/homeassistant/components/fritz/strings.json @@ -165,10 +165,10 @@ }, "exceptions": { "config_entry_not_found": { - "message": "Failed to call service \"{service}\". Config entry for target not found" + "message": "Failed to perform action \"{service}\". 
Config entry for target not found" }, - "service_parameter_unknown": { "message": "Service or parameter unknown" }, - "service_not_supported": { "message": "Service not supported" }, + "service_parameter_unknown": { "message": "Action or parameter unknown" }, + "service_not_supported": { "message": "Action not supported" }, "error_refresh_hosts_info": { "message": "Error refreshing hosts info" }, diff --git a/homeassistant/components/fritzbox_callmonitor/sensor.py b/homeassistant/components/fritzbox_callmonitor/sensor.py index 9cd37411698..668369c35a7 100644 --- a/homeassistant/components/fritzbox_callmonitor/sensor.py +++ b/homeassistant/components/fritzbox_callmonitor/sensor.py @@ -102,6 +102,7 @@ class FritzBoxCallSensor(SensorEntity): self._attr_unique_id = unique_id self._attr_native_value = CallState.IDLE self._attr_device_info = DeviceInfo( + configuration_url=self._fritzbox_phonebook.fph.fc.address, identifiers={(DOMAIN, unique_id)}, manufacturer=MANUFACTURER, model=self._fritzbox_phonebook.fph.modelname, diff --git a/homeassistant/components/generic_hygrostat/config_flow.py b/homeassistant/components/generic_hygrostat/config_flow.py index cade566968d..7c35b0e9317 100644 --- a/homeassistant/components/generic_hygrostat/config_flow.py +++ b/homeassistant/components/generic_hygrostat/config_flow.py @@ -7,9 +7,9 @@ from typing import Any, cast import voluptuous as vol +from homeassistant.components import fan, switch from homeassistant.components.humidifier import HumidifierDeviceClass from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN, SensorDeviceClass -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import CONF_NAME, PERCENTAGE from homeassistant.helpers import selector from homeassistant.helpers.schema_config_entry_flow import ( @@ -45,7 +45,7 @@ OPTIONS_SCHEMA = { ) ), vol.Required(CONF_HUMIDIFIER): selector.EntitySelector( - selector.EntitySelectorConfig(domain=SWITCH_DOMAIN) + selector.EntitySelectorConfig(domain=[switch.DOMAIN, fan.DOMAIN]) ), vol.Required( CONF_DRY_TOLERANCE, default=DEFAULT_TOLERANCE diff --git a/homeassistant/components/generic_thermostat/config_flow.py b/homeassistant/components/generic_thermostat/config_flow.py index f1fe1ecfe25..29e3d69c2da 100644 --- a/homeassistant/components/generic_thermostat/config_flow.py +++ b/homeassistant/components/generic_thermostat/config_flow.py @@ -7,8 +7,8 @@ from typing import Any, cast import voluptuous as vol +from homeassistant.components import fan, switch from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN, SensorDeviceClass -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import CONF_NAME, DEGREE from homeassistant.helpers import selector from homeassistant.helpers.schema_config_entry_flow import ( @@ -38,7 +38,7 @@ OPTIONS_SCHEMA = { ) ), vol.Required(CONF_HEATER): selector.EntitySelector( - selector.EntitySelectorConfig(domain=SWITCH_DOMAIN) + selector.EntitySelectorConfig(domain=[fan.DOMAIN, switch.DOMAIN]) ), vol.Required( CONF_COLD_TOLERANCE, default=DEFAULT_TOLERANCE diff --git a/homeassistant/components/google_assistant/strings.json b/homeassistant/components/google_assistant/strings.json index 8ef77f8d8c3..70fac8db6c1 100644 --- a/homeassistant/components/google_assistant/strings.json +++ b/homeassistant/components/google_assistant/strings.json @@ -13,7 +13,7 @@ "fields": { "agent_user_id": { "name": "Agent user ID", - "description": "Only needed for automations. 
Specific Home Assistant user id (not username, ID in configuration > users > under username) to sync with Google Assistant. Do not need when you call this service through Home Assistant front end or API. Used in automation script or other place where context.user_id is missing." + "description": "Only needed for automations. Specific Home Assistant user id (not username, ID in configuration > users > under username) to sync with Google Assistant. Do not need when you use this action through Home Assistant front end or API. Used in automation script or other place where context.user_id is missing." } } } diff --git a/homeassistant/components/google_cloud/helpers.py b/homeassistant/components/google_cloud/helpers.py index 97fa66b1f18..66dfbcf01eb 100644 --- a/homeassistant/components/google_cloud/helpers.py +++ b/homeassistant/components/google_cloud/helpers.py @@ -2,6 +2,8 @@ from __future__ import annotations +import functools +import operator from types import MappingProxyType from typing import Any @@ -74,7 +76,7 @@ def tts_options_schema( ): SelectSelector( SelectSelectorConfig( mode=SelectSelectorMode.DROPDOWN, - options=["", *sum(voices.values(), [])], + options=["", *functools.reduce(operator.iadd, voices.values(), [])], ) ), vol.Optional( diff --git a/homeassistant/components/google_sheets/__init__.py b/homeassistant/components/google_sheets/__init__.py index fc104cc5c22..3f34b23d522 100644 --- a/homeassistant/components/google_sheets/__init__.py +++ b/homeassistant/components/google_sheets/__init__.py @@ -41,7 +41,7 @@ SHEET_SERVICE_SCHEMA = vol.All( { vol.Required(DATA_CONFIG_ENTRY): ConfigEntrySelector(), vol.Optional(WORKSHEET): cv.string, - vol.Required(DATA): dict, + vol.Required(DATA): vol.Any(cv.ensure_list, [dict]), }, ) @@ -108,15 +108,19 @@ async def async_setup_service(hass: HomeAssistant) -> None: raise HomeAssistantError("Failed to write data") from ex worksheet = sheet.worksheet(call.data.get(WORKSHEET, sheet.sheet1.title)) - row_data = {"created": str(datetime.now())} | call.data[DATA] columns: list[str] = next(iter(worksheet.get_values("A1:ZZ1")), []) - row = [row_data.get(column, "") for column in columns] - for key, value in row_data.items(): - if key not in columns: - columns.append(key) - worksheet.update_cell(1, len(columns), key) - row.append(value) - worksheet.append_row(row, value_input_option=ValueInputOption.user_entered) + now = str(datetime.now()) + rows = [] + for d in call.data[DATA]: + row_data = {"created": now} | d + row = [row_data.get(column, "") for column in columns] + for key, value in row_data.items(): + if key not in columns: + columns.append(key) + worksheet.update_cell(1, len(columns), key) + row.append(value) + rows.append(row) + worksheet.append_rows(rows, value_input_option=ValueInputOption.user_entered) async def append_to_sheet(call: ServiceCall) -> None: """Append new line of data to a Google Sheets document.""" diff --git a/homeassistant/components/google_sheets/strings.json b/homeassistant/components/google_sheets/strings.json index e498e36723e..0723456224f 100644 --- a/homeassistant/components/google_sheets/strings.json +++ b/homeassistant/components/google_sheets/strings.json @@ -48,7 +48,7 @@ }, "data": { "name": "Data", - "description": "Data to be appended to the worksheet. This puts the values on a new row underneath the matching column (key). Any new key is placed on the top of a new column." + "description": "Data to be appended to the worksheet. This puts the values on new rows underneath the matching column (key). 
Any new key is placed on the top of a new column." } } } diff --git a/homeassistant/components/govee_ble/manifest.json b/homeassistant/components/govee_ble/manifest.json index 858e916d2d8..c48fa8f5251 100644 --- a/homeassistant/components/govee_ble/manifest.json +++ b/homeassistant/components/govee_ble/manifest.json @@ -14,6 +14,26 @@ "local_name": "B5178*", "connectable": false }, + { + "local_name": "GV5121*", + "connectable": false + }, + { + "local_name": "GV5122*", + "connectable": false + }, + { + "local_name": "GV5123*", + "connectable": false + }, + { + "local_name": "GV5125*", + "connectable": false + }, + { + "local_name": "GV5126*", + "connectable": false + }, { "manufacturer_id": 1, "service_uuid": "0000ec88-0000-1000-8000-00805f9b34fb", @@ -83,6 +103,10 @@ "manufacturer_id": 19506, "service_uuid": "00001801-0000-1000-8000-00805f9b34fb", "connectable": false + }, + { + "manufacturer_id": 61320, + "connectable": false } ], "codeowners": ["@bdraco", "@PierreAronnax"], @@ -90,5 +114,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/govee_ble", "iot_class": "local_push", - "requirements": ["govee-ble==0.31.3"] + "requirements": ["govee-ble==0.33.0"] } diff --git a/homeassistant/components/group/button.py b/homeassistant/components/group/button.py new file mode 100644 index 00000000000..d8481686615 --- /dev/null +++ b/homeassistant/components/group/button.py @@ -0,0 +1,131 @@ +"""Platform allowing several button entities to be grouped into one single button.""" + +from __future__ import annotations + +from typing import Any + +import voluptuous as vol + +from homeassistant.components.button import ( + DOMAIN, + PLATFORM_SCHEMA as BUTTON_PLATFORM_SCHEMA, + SERVICE_PRESS, + ButtonEntity, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + ATTR_ENTITY_ID, + CONF_ENTITIES, + CONF_NAME, + CONF_UNIQUE_ID, + STATE_UNAVAILABLE, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType + +from .entity import GroupEntity + +DEFAULT_NAME = "Button group" + +# No limit on parallel updates to enable a group calling another group +PARALLEL_UPDATES = 0 + +PLATFORM_SCHEMA = BUTTON_PLATFORM_SCHEMA.extend( + { + vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Optional(CONF_UNIQUE_ID): cv.string, + } +) + + +async def async_setup_platform( + _: HomeAssistant, + config: ConfigType, + async_add_entities: AddEntitiesCallback, + __: DiscoveryInfoType | None = None, +) -> None: + """Set up the button group platform.""" + async_add_entities( + [ + ButtonGroup( + config.get(CONF_UNIQUE_ID), + config[CONF_NAME], + config[CONF_ENTITIES], + ) + ] + ) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Initialize button group config entry.""" + registry = er.async_get(hass) + entities = er.async_validate_entity_ids( + registry, config_entry.options[CONF_ENTITIES] + ) + async_add_entities( + [ + ButtonGroup( + config_entry.entry_id, + config_entry.title, + entities, + ) + ] + ) + + +@callback +def async_create_preview_button( + hass: HomeAssistant, name: str, validated_config: dict[str, Any] +) -> ButtonGroup: + """Create a 
preview button.""" + return ButtonGroup( + None, + name, + validated_config[CONF_ENTITIES], + ) + + +class ButtonGroup(GroupEntity, ButtonEntity): + """Representation of an button group.""" + + _attr_available = False + _attr_should_poll = False + + def __init__( + self, + unique_id: str | None, + name: str, + entity_ids: list[str], + ) -> None: + """Initialize a button group.""" + self._entity_ids = entity_ids + self._attr_name = name + self._attr_extra_state_attributes = {ATTR_ENTITY_ID: entity_ids} + self._attr_unique_id = unique_id + + async def async_press(self) -> None: + """Forward the press to all buttons in the group.""" + await self.hass.services.async_call( + DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: self._entity_ids}, + blocking=True, + context=self._context, + ) + + @callback + def async_update_group_state(self) -> None: + """Query all members and determine the button group state.""" + # Set group as unavailable if all members are unavailable or missing + self._attr_available = any( + state.state != STATE_UNAVAILABLE + for entity_id in self._entity_ids + if (state := self.hass.states.get(entity_id)) is not None + ) diff --git a/homeassistant/components/group/config_flow.py b/homeassistant/components/group/config_flow.py index 4eb0f1cdd52..54ef7d0626f 100644 --- a/homeassistant/components/group/config_flow.py +++ b/homeassistant/components/group/config_flow.py @@ -23,6 +23,7 @@ from homeassistant.helpers.schema_config_entry_flow import ( ) from .binary_sensor import CONF_ALL, async_create_preview_binary_sensor +from .button import async_create_preview_button from .const import CONF_HIDE_MEMBERS, CONF_IGNORE_NON_NUMERIC, DOMAIN from .cover import async_create_preview_cover from .entity import GroupEntity @@ -146,6 +147,7 @@ async def light_switch_options_schema( GROUP_TYPES = [ "binary_sensor", + "button", "cover", "event", "fan", @@ -185,6 +187,11 @@ CONFIG_FLOW = { preview="group", validate_user_input=set_group_type("binary_sensor"), ), + "button": SchemaFlowFormStep( + basic_group_config_schema("button"), + preview="group", + validate_user_input=set_group_type("button"), + ), "cover": SchemaFlowFormStep( basic_group_config_schema("cover"), preview="group", @@ -234,6 +241,10 @@ OPTIONS_FLOW = { binary_sensor_options_schema, preview="group", ), + "button": SchemaFlowFormStep( + partial(basic_group_options_schema, "button"), + preview="group", + ), "cover": SchemaFlowFormStep( partial(basic_group_options_schema, "cover"), preview="group", @@ -275,6 +286,7 @@ CREATE_PREVIEW_ENTITY: dict[ Callable[[HomeAssistant, str, dict[str, Any]], GroupEntity | MediaPlayerGroup], ] = { "binary_sensor": async_create_preview_binary_sensor, + "button": async_create_preview_button, "cover": async_create_preview_cover, "event": async_create_preview_event, "fan": async_create_preview_fan, diff --git a/homeassistant/components/group/strings.json b/homeassistant/components/group/strings.json index bff1f1e22ec..dc850804d94 100644 --- a/homeassistant/components/group/strings.json +++ b/homeassistant/components/group/strings.json @@ -7,6 +7,7 @@ "description": "Groups allow you to create a new entity that represents multiple entities of the same type.", "menu_options": { "binary_sensor": "Binary sensor group", + "button": "Button group", "cover": "Cover group", "event": "Event group", "fan": "Fan group", @@ -27,6 +28,14 @@ "name": "[%key:common::config_flow::data::name%]" } }, + "button": { + "title": "[%key:component::group::config::step::user::title%]", + "data": { + "entities": 
"[%key:component::group::config::step::binary_sensor::data::entities%]", + "hide_members": "[%key:component::group::config::step::binary_sensor::data::hide_members%]", + "name": "[%key:common::config_flow::data::name%]" + } + }, "cover": { "title": "[%key:component::group::config::step::user::title%]", "data": { @@ -109,6 +118,12 @@ "hide_members": "[%key:component::group::config::step::binary_sensor::data::hide_members%]" } }, + "button": { + "data": { + "entities": "[%key:component::group::config::step::binary_sensor::data::entities%]", + "hide_members": "[%key:component::group::config::step::binary_sensor::data::hide_members%]" + } + }, "cover": { "data": { "entities": "[%key:component::group::config::step::binary_sensor::data::entities%]", diff --git a/homeassistant/components/habitica/strings.json b/homeassistant/components/habitica/strings.json index a2c93391503..4ce507afffd 100644 --- a/homeassistant/components/habitica/strings.json +++ b/homeassistant/components/habitica/strings.json @@ -11,7 +11,7 @@ "user": { "data": { "url": "[%key:common::config_flow::data::url%]", - "name": "Override for Habitica’s username. Will be used for service calls", + "name": "Override for Habitica’s username. Will be used for actions", "api_user": "Habitica’s API user ID", "api_key": "[%key:common::config_flow::data::api_key%]" }, diff --git a/homeassistant/components/habitica/todo.py b/homeassistant/components/habitica/todo.py index 55465c87c8c..ab458f9f59f 100644 --- a/homeassistant/components/habitica/todo.py +++ b/homeassistant/components/habitica/todo.py @@ -127,6 +127,7 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): if ( self.entity_description.key is HabiticaTodoList.TODOS + and item.due is not None ): # Only todos support a due date. date = item.due.isoformat() else: @@ -149,14 +150,14 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): # Score up or down if item status changed if ( current_item.status is TodoItemStatus.NEEDS_ACTION - and item.status is TodoItemStatus.COMPLETED + and item.status == TodoItemStatus.COMPLETED ): score_result = ( await self.coordinator.api.tasks[item.uid].score["up"].post() ) elif ( current_item.status is TodoItemStatus.COMPLETED - and item.status is TodoItemStatus.NEEDS_ACTION + and item.status == TodoItemStatus.NEEDS_ACTION ): score_result = ( await self.coordinator.api.tasks[item.uid].score["down"].post() diff --git a/homeassistant/components/hassio/strings.json b/homeassistant/components/hassio/strings.json index 6b81b87e195..7c3aa70b559 100644 --- a/homeassistant/components/hassio/strings.json +++ b/homeassistant/components/hassio/strings.json @@ -289,7 +289,7 @@ }, "addon_update": { "name": "Update add-on.", - "description": "Updates an add-on. This service should be used with caution since add-on updates can contain breaking changes. It is highly recommended that you review release notes/change logs before updating an add-on.", + "description": "Updates an add-on. This action should be used with caution since add-on updates can contain breaking changes. 
It is highly recommended that you review release notes/change logs before updating an add-on.", "fields": { "addon": { "name": "[%key:component::hassio::services::addon_start::fields::addon::name%]", diff --git a/homeassistant/components/history_stats/__init__.py b/homeassistant/components/history_stats/__init__.py index 476b17f581f..dcca10d73e9 100644 --- a/homeassistant/components/history_stats/__init__.py +++ b/homeassistant/components/history_stats/__init__.py @@ -7,6 +7,9 @@ from datetime import timedelta from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ENTITY_ID, CONF_STATE from homeassistant.core import HomeAssistant +from homeassistant.helpers.device import ( + async_remove_stale_devices_links_keep_entity_device, +) from homeassistant.helpers.template import Template from .const import CONF_DURATION, CONF_END, CONF_START, PLATFORMS @@ -42,6 +45,12 @@ async def async_setup_entry( await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator + async_remove_stale_devices_links_keep_entity_device( + hass, + entry.entry_id, + entry.options[CONF_ENTITY_ID], + ) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(update_listener)) diff --git a/homeassistant/components/history_stats/sensor.py b/homeassistant/components/history_stats/sensor.py index a5139a8e9d6..99e953ff9dd 100644 --- a/homeassistant/components/history_stats/sensor.py +++ b/homeassistant/components/history_stats/sensor.py @@ -26,6 +26,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import PlatformNotReady import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.device import async_device_info_to_link_from_entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.reload import async_setup_reload_service from homeassistant.helpers.template import Template @@ -111,7 +112,9 @@ async def async_setup_platform( await coordinator.async_refresh() if not coordinator.last_update_success: raise PlatformNotReady from coordinator.last_exception - async_add_entities([HistoryStatsSensor(coordinator, sensor_type, name, unique_id)]) + async_add_entities( + [HistoryStatsSensor(hass, coordinator, sensor_type, name, unique_id, entity_id)] + ) async def async_setup_entry( @@ -123,8 +126,13 @@ async def async_setup_entry( sensor_type: str = entry.options[CONF_TYPE] coordinator = entry.runtime_data + entity_id: str = entry.options[CONF_ENTITY_ID] async_add_entities( - [HistoryStatsSensor(coordinator, sensor_type, entry.title, entry.entry_id)] + [ + HistoryStatsSensor( + hass, coordinator, sensor_type, entry.title, entry.entry_id, entity_id + ) + ] ) @@ -167,16 +175,22 @@ class HistoryStatsSensor(HistoryStatsSensorBase): def __init__( self, + hass: HomeAssistant, coordinator: HistoryStatsUpdateCoordinator, sensor_type: str, name: str, unique_id: str | None, + source_entity_id: str, ) -> None: """Initialize the HistoryStats sensor.""" super().__init__(coordinator, name) self._attr_native_unit_of_measurement = UNITS[sensor_type] self._type = sensor_type self._attr_unique_id = unique_id + self._attr_device_info = async_device_info_to_link_from_entity( + hass, + source_entity_id, + ) self._process_update() if self._type == CONF_TYPE_TIME: self._attr_device_class = SensorDeviceClass.DURATION diff --git a/homeassistant/components/homeassistant/strings.json 
b/homeassistant/components/homeassistant/strings.json index a0de0348025..7cf05527b6b 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -133,15 +133,15 @@ }, "toggle": { "name": "Generic toggle", - "description": "Generic service to toggle devices on/off under any domain." + "description": "Generic action to toggle devices on/off under any domain." }, "turn_on": { "name": "Generic turn on", - "description": "Generic service to turn devices on under any domain." + "description": "Generic action to turn devices on under any domain." }, "turn_off": { "name": "Generic turn off", - "description": "Generic service to turn devices off under any domain." + "description": "Generic action to turn devices off under any domain." }, "update_entity": { "name": "Update entity", @@ -205,19 +205,19 @@ "message": "Unknown error when validating config for {domain} from integration {p_name} - {error}." }, "service_not_found": { - "message": "Service {domain}.{service} not found." + "message": "Action {domain}.{service} not found." }, "service_does_not_support_response": { - "message": "A service which does not return responses can't be called with {return_response}." + "message": "An action which does not return responses can't be called with {return_response}." }, "service_lacks_response_request": { - "message": "The service call requires responses and must be called with {return_response}." + "message": "The action requires responses and must be called with {return_response}." }, "service_reponse_invalid": { - "message": "Failed to process the returned service response data, expected a dictionary, but got {response_data_type}." + "message": "Failed to process the returned action response data, expected a dictionary, but got {response_data_type}." }, "service_should_be_blocking": { - "message": "A non blocking service call with argument {non_blocking_argument} can't be used together with argument {return_response}." + "message": "A non blocking action call with argument {non_blocking_argument} can't be used together with argument {return_response}." } } } diff --git a/homeassistant/components/homekit/strings.json b/homeassistant/components/homekit/strings.json index 30ecfba569e..92b836d5ec6 100644 --- a/homeassistant/components/homekit/strings.json +++ b/homeassistant/components/homekit/strings.json @@ -80,7 +80,7 @@ }, "unpair": { "name": "Unpair an accessory or bridge", - "description": "Forcefully removes all pairings from an accessory to allow re-pairing. Use this service if the accessory is no longer responsive, and you want to avoid deleting and re-adding the entry. Room locations, and accessory preferences will be lost." + "description": "Forcefully removes all pairings from an accessory to allow re-pairing. Use this action if the accessory is no longer responsive, and you want to avoid deleting and re-adding the entry. Room locations, and accessory preferences will be lost." 
} } } diff --git a/homeassistant/components/homekit/type_cameras.py b/homeassistant/components/homekit/type_cameras.py index 40fd6b2aade..3851bb43541 100644 --- a/homeassistant/components/homekit/type_cameras.py +++ b/homeassistant/components/homekit/type_cameras.py @@ -233,8 +233,7 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] self._char_motion_detected = serv_motion.configure_char( CHAR_MOTION_DETECTED, value=False ) - if not self.motion_is_event: - self._async_update_motion_state(state) + self._async_update_motion_state(None, state) self._char_doorbell_detected = None self._char_doorbell_detected_switch = None @@ -264,9 +263,7 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] ) serv_speaker = self.add_preload_service(SERV_SPEAKER) serv_speaker.configure_char(CHAR_MUTE, value=0) - - if not self.doorbell_is_event: - self._async_update_doorbell_state(state) + self._async_update_doorbell_state(None, state) @pyhap_callback # type: ignore[misc] @callback @@ -304,20 +301,25 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] self, event: Event[EventStateChangedData] ) -> None: """Handle state change event listener callback.""" - if not state_changed_event_is_same_state(event): - self._async_update_motion_state(event.data["new_state"]) + if not state_changed_event_is_same_state(event) and ( + new_state := event.data["new_state"] + ): + self._async_update_motion_state(event.data["old_state"], new_state) @callback - def _async_update_motion_state(self, new_state: State | None) -> None: + def _async_update_motion_state( + self, old_state: State | None, new_state: State + ) -> None: """Handle link motion sensor state change to update HomeKit value.""" - if not new_state: - return - state = new_state.state char = self._char_motion_detected assert char is not None if self.motion_is_event: - if state in (STATE_UNKNOWN, STATE_UNAVAILABLE): + if ( + old_state is None + or old_state.state == STATE_UNAVAILABLE + or state in (STATE_UNKNOWN, STATE_UNAVAILABLE) + ): return _LOGGER.debug( "%s: Set linked motion %s sensor to True/False", @@ -348,16 +350,21 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] if not state_changed_event_is_same_state(event) and ( new_state := event.data["new_state"] ): - self._async_update_doorbell_state(new_state) + self._async_update_doorbell_state(event.data["old_state"], new_state) @callback - def _async_update_doorbell_state(self, new_state: State) -> None: + def _async_update_doorbell_state( + self, old_state: State | None, new_state: State + ) -> None: """Handle link doorbell sensor state change to update HomeKit value.""" assert self._char_doorbell_detected assert self._char_doorbell_detected_switch state = new_state.state if state == STATE_ON or ( - self.doorbell_is_event and state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) + self.doorbell_is_event + and old_state is not None + and old_state.state != STATE_UNAVAILABLE + and state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) ): self._char_doorbell_detected.set_value(DOORBELL_SINGLE_PRESS) self._char_doorbell_detected_switch.set_value(DOORBELL_SINGLE_PRESS) diff --git a/homeassistant/components/huawei_lte/strings.json b/homeassistant/components/huawei_lte/strings.json index b1b16184b0c..e044413f296 100644 --- a/homeassistant/components/huawei_lte/strings.json +++ b/homeassistant/components/huawei_lte/strings.json @@ -361,7 +361,7 @@ }, "suspend_integration": { "name": "Suspend integration", - "description": "Suspends integration. 
Suspending logs the integration out from the router, and stops accessing it. Useful e.g. if accessing the router web interface from another source such as a web browser is temporarily required. Invoke the resume_integration service to resume.\n.", + "description": "Suspends integration. Suspending logs the integration out from the router, and stops accessing it. Useful e.g. if accessing the router web interface from another source such as a web browser is temporarily required. Invoke the resume_integration action to resume.\n.", "fields": { "url": { "name": "[%key:common::config_flow::data::url%]", diff --git a/homeassistant/components/hunterdouglas_powerview/__init__.py b/homeassistant/components/hunterdouglas_powerview/__init__.py index 106a61e75cc..6f63641b722 100644 --- a/homeassistant/components/hunterdouglas_powerview/__init__.py +++ b/homeassistant/components/hunterdouglas_powerview/__init__.py @@ -9,7 +9,6 @@ from aiopvapi.rooms import Rooms from aiopvapi.scenes import Scenes from aiopvapi.shades import Shades -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_VERSION, CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -18,7 +17,7 @@ import homeassistant.helpers.config_validation as cv from .const import DOMAIN, HUB_EXCEPTIONS from .coordinator import PowerviewShadeUpdateCoordinator -from .model import PowerviewDeviceInfo, PowerviewEntryData +from .model import PowerviewConfigEntry, PowerviewDeviceInfo, PowerviewEntryData from .shade_data import PowerviewShadeData PARALLEL_UPDATES = 1 @@ -36,7 +35,7 @@ PLATFORMS = [ _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: PowerviewConfigEntry) -> bool: """Set up Hunter Douglas PowerView from a config entry.""" config = entry.data @@ -100,7 +99,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # populate raw shade data into the coordinator for diagnostics coordinator.data.store_group_data(shade_data) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = PowerviewEntryData( + entry.runtime_data = PowerviewEntryData( api=pv_request, room_data=room_data.processed, scene_data=scene_data.processed, @@ -126,8 +125,6 @@ async def async_get_device_info(hub: Hub) -> PowerviewDeviceInfo: ) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: PowerviewConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/hunterdouglas_powerview/button.py b/homeassistant/components/hunterdouglas_powerview/button.py index ecb71f9653a..adb3e177a8e 100644 --- a/homeassistant/components/hunterdouglas_powerview/button.py +++ b/homeassistant/components/hunterdouglas_powerview/button.py @@ -20,15 +20,13 @@ from homeassistant.components.button import ( ButtonEntity, ButtonEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN from .coordinator import 
PowerviewShadeUpdateCoordinator from .entity import ShadeEntity -from .model import PowerviewDeviceInfo, PowerviewEntryData +from .model import PowerviewConfigEntry, PowerviewDeviceInfo @dataclass(frozen=True) @@ -75,13 +73,11 @@ BUTTONS_SHADE: Final = [ async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: PowerviewConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the hunter douglas advanced feature buttons.""" - - pv_entry: PowerviewEntryData = hass.data[DOMAIN][entry.entry_id] - + pv_entry = entry.runtime_data entities: list[ButtonEntity] = [] for shade in pv_entry.shade_data.values(): room_name = getattr(pv_entry.room_data.get(shade.room_id), ATTR_NAME, "") diff --git a/homeassistant/components/hunterdouglas_powerview/cover.py b/homeassistant/components/hunterdouglas_powerview/cover.py index 57409f37ac9..6ee5fc92a41 100644 --- a/homeassistant/components/hunterdouglas_powerview/cover.py +++ b/homeassistant/components/hunterdouglas_powerview/cover.py @@ -25,15 +25,14 @@ from homeassistant.components.cover import ( CoverEntity, CoverEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later -from .const import DOMAIN, STATE_ATTRIBUTE_ROOM_NAME +from .const import STATE_ATTRIBUTE_ROOM_NAME from .coordinator import PowerviewShadeUpdateCoordinator from .entity import ShadeEntity -from .model import PowerviewDeviceInfo, PowerviewEntryData +from .model import PowerviewConfigEntry, PowerviewDeviceInfo _LOGGER = logging.getLogger(__name__) @@ -49,12 +48,13 @@ SCAN_INTERVAL = timedelta(minutes=10) async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: PowerviewConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the hunter douglas shades.""" - - pv_entry: PowerviewEntryData = hass.data[DOMAIN][entry.entry_id] - coordinator: PowerviewShadeUpdateCoordinator = pv_entry.coordinator + pv_entry = entry.runtime_data + coordinator = pv_entry.coordinator async def _async_initial_refresh() -> None: """Force position refresh shortly after adding. 
diff --git a/homeassistant/components/hunterdouglas_powerview/diagnostics.py b/homeassistant/components/hunterdouglas_powerview/diagnostics.py index 1fbf721d2bd..7d6908f1936 100644 --- a/homeassistant/components/hunterdouglas_powerview/diagnostics.py +++ b/homeassistant/components/hunterdouglas_powerview/diagnostics.py @@ -3,20 +3,18 @@ from __future__ import annotations from dataclasses import asdict -import logging from typing import Any import attr from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_CONFIGURATION_URL, CONF_HOST from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.device_registry import DeviceEntry -from .const import DOMAIN, REDACT_HUB_ADDRESS, REDACT_MAC_ADDRESS, REDACT_SERIAL_NUMBER -from .model import PowerviewEntryData +from .const import REDACT_HUB_ADDRESS, REDACT_MAC_ADDRESS, REDACT_SERIAL_NUMBER +from .model import PowerviewConfigEntry REDACT_CONFIG = { CONF_HOST, @@ -26,11 +24,9 @@ REDACT_CONFIG = { ATTR_CONFIGURATION_URL, } -_LOGGER = logging.getLogger(__name__) - async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: PowerviewConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" data = _async_get_diagnostics(hass, entry) @@ -47,7 +43,7 @@ async def async_get_config_entry_diagnostics( async def async_get_device_diagnostics( - hass: HomeAssistant, entry: ConfigEntry, device: DeviceEntry + hass: HomeAssistant, entry: PowerviewConfigEntry, device: DeviceEntry ) -> dict[str, Any]: """Return diagnostics for a device entry.""" data = _async_get_diagnostics(hass, entry) @@ -65,10 +61,10 @@ async def async_get_device_diagnostics( @callback def _async_get_diagnostics( hass: HomeAssistant, - entry: ConfigEntry, + entry: PowerviewConfigEntry, ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - pv_entry: PowerviewEntryData = hass.data[DOMAIN][entry.entry_id] + pv_entry = entry.runtime_data shade_data = pv_entry.coordinator.data.get_all_raw_data() hub_info = async_redact_data(asdict(pv_entry.device_info), REDACT_CONFIG) return {"hub_info": hub_info, "shade_data": shade_data} diff --git a/homeassistant/components/hunterdouglas_powerview/model.py b/homeassistant/components/hunterdouglas_powerview/model.py index 7cf259ced18..86296b949f4 100644 --- a/homeassistant/components/hunterdouglas_powerview/model.py +++ b/homeassistant/components/hunterdouglas_powerview/model.py @@ -9,8 +9,12 @@ from aiopvapi.resources.room import Room from aiopvapi.resources.scene import Scene from aiopvapi.resources.shade import BaseShade +from homeassistant.config_entries import ConfigEntry + from .coordinator import PowerviewShadeUpdateCoordinator +type PowerviewConfigEntry = ConfigEntry[PowerviewEntryData] + @dataclass class PowerviewEntryData: diff --git a/homeassistant/components/hunterdouglas_powerview/number.py b/homeassistant/components/hunterdouglas_powerview/number.py index b37331c08df..f893b04b2d1 100644 --- a/homeassistant/components/hunterdouglas_powerview/number.py +++ b/homeassistant/components/hunterdouglas_powerview/number.py @@ -2,7 +2,6 @@ from collections.abc import Callable from dataclasses import dataclass -import logging from typing import Final from aiopvapi.helpers.constants import ATTR_NAME, MOTION_VELOCITY @@ -13,17 +12,13 @@ from homeassistant.components.number 
import ( NumberMode, RestoreNumber, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN from .coordinator import PowerviewShadeUpdateCoordinator from .entity import ShadeEntity -from .model import PowerviewDeviceInfo, PowerviewEntryData - -_LOGGER = logging.getLogger(__name__) +from .model import PowerviewConfigEntry, PowerviewDeviceInfo @dataclass(frozen=True, kw_only=True) @@ -57,12 +52,12 @@ NUMBERS: Final = ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: PowerviewConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the hunter douglas number entities.""" - - pv_entry: PowerviewEntryData = hass.data[DOMAIN][entry.entry_id] - + pv_entry = entry.runtime_data entities: list[PowerViewNumber] = [] for shade in pv_entry.shade_data.values(): room_name = getattr(pv_entry.room_data.get(shade.room_id), ATTR_NAME, "") diff --git a/homeassistant/components/hunterdouglas_powerview/scene.py b/homeassistant/components/hunterdouglas_powerview/scene.py index af5b86960c4..2aaa255c5ab 100644 --- a/homeassistant/components/hunterdouglas_powerview/scene.py +++ b/homeassistant/components/hunterdouglas_powerview/scene.py @@ -9,14 +9,13 @@ from aiopvapi.helpers.constants import ATTR_NAME from aiopvapi.resources.scene import Scene as PvScene from homeassistant.components.scene import Scene -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN, STATE_ATTRIBUTE_ROOM_NAME +from .const import STATE_ATTRIBUTE_ROOM_NAME from .coordinator import PowerviewShadeUpdateCoordinator from .entity import HDEntity -from .model import PowerviewDeviceInfo, PowerviewEntryData +from .model import PowerviewConfigEntry, PowerviewDeviceInfo _LOGGER = logging.getLogger(__name__) @@ -24,12 +23,12 @@ RESYNC_DELAY = 60 async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: PowerviewConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up powerview scene entries.""" - - pv_entry: PowerviewEntryData = hass.data[DOMAIN][entry.entry_id] - + pv_entry = entry.runtime_data pvscenes: list[PowerViewScene] = [] for scene in pv_entry.scene_data.values(): room_name = getattr(pv_entry.room_data.get(scene.room_id), ATTR_NAME, "") diff --git a/homeassistant/components/hunterdouglas_powerview/select.py b/homeassistant/components/hunterdouglas_powerview/select.py index f1e9c491659..db850a0ddbf 100644 --- a/homeassistant/components/hunterdouglas_powerview/select.py +++ b/homeassistant/components/hunterdouglas_powerview/select.py @@ -4,24 +4,19 @@ from __future__ import annotations from collections.abc import Callable, Coroutine from dataclasses import dataclass -import logging from typing import Any, Final from aiopvapi.helpers.constants import ATTR_NAME, FUNCTION_SET_POWER from aiopvapi.resources.shade import BaseShade from homeassistant.components.select import SelectEntity, SelectEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import 
AddEntitiesCallback -from .const import DOMAIN from .coordinator import PowerviewShadeUpdateCoordinator from .entity import ShadeEntity -from .model import PowerviewDeviceInfo, PowerviewEntryData - -_LOGGER = logging.getLogger(__name__) +from .model import PowerviewConfigEntry, PowerviewDeviceInfo @dataclass(frozen=True) @@ -57,12 +52,12 @@ DROPDOWNS: Final = [ async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: PowerviewConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the hunter douglas select entities.""" - - pv_entry: PowerviewEntryData = hass.data[DOMAIN][entry.entry_id] - + pv_entry = entry.runtime_data entities: list[PowerViewSelect] = [] for shade in pv_entry.shade_data.values(): if not shade.has_battery_info(): diff --git a/homeassistant/components/hunterdouglas_powerview/sensor.py b/homeassistant/components/hunterdouglas_powerview/sensor.py index b24193ac438..f5e3ddd5e12 100644 --- a/homeassistant/components/hunterdouglas_powerview/sensor.py +++ b/homeassistant/components/hunterdouglas_powerview/sensor.py @@ -13,15 +13,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, SIGNAL_STRENGTH_DECIBELS, EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN from .coordinator import PowerviewShadeUpdateCoordinator from .entity import ShadeEntity -from .model import PowerviewDeviceInfo, PowerviewEntryData +from .model import PowerviewConfigEntry, PowerviewDeviceInfo @dataclass(frozen=True) @@ -79,12 +77,12 @@ SENSORS: Final = [ async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: PowerviewConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the hunter douglas sensor entities.""" - - pv_entry: PowerviewEntryData = hass.data[DOMAIN][entry.entry_id] - + pv_entry = entry.runtime_data entities: list[PowerViewSensor] = [] for shade in pv_entry.shade_data.values(): room_name = getattr(pv_entry.room_data.get(shade.room_id), ATTR_NAME, "") diff --git a/homeassistant/components/hunterdouglas_powerview/shade_data.py b/homeassistant/components/hunterdouglas_powerview/shade_data.py index fd2f0466467..01359da76f2 100644 --- a/homeassistant/components/hunterdouglas_powerview/shade_data.py +++ b/homeassistant/components/hunterdouglas_powerview/shade_data.py @@ -3,7 +3,6 @@ from __future__ import annotations from dataclasses import fields -import logging from typing import Any from aiopvapi.resources.model import PowerviewData @@ -11,8 +10,6 @@ from aiopvapi.resources.shade import BaseShade, ShadePosition from .util import async_map_data_by_id -_LOGGER = logging.getLogger(__name__) - POSITION_FIELDS = [field for field in fields(ShadePosition) if field.name != "velocity"] diff --git a/homeassistant/components/idasen_desk/manifest.json b/homeassistant/components/idasen_desk/manifest.json index a09d155b5b0..17a5f519274 100644 --- a/homeassistant/components/idasen_desk/manifest.json +++ b/homeassistant/components/idasen_desk/manifest.json @@ -12,5 +12,5 @@ "documentation": "https://www.home-assistant.io/integrations/idasen_desk", "iot_class": "local_push", "quality_scale": "silver", - "requirements": ["idasen-ha==2.6.1"] + 
"requirements": ["idasen-ha==2.6.2"] } diff --git a/homeassistant/components/intent/__init__.py b/homeassistant/components/intent/__init__.py index 9b09fa9167b..c933b94fdd4 100644 --- a/homeassistant/components/intent/__init__.py +++ b/homeassistant/components/intent/__init__.py @@ -2,6 +2,7 @@ from __future__ import annotations +from datetime import datetime import logging from typing import Any, Protocol @@ -120,6 +121,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: intent.async_register(hass, PauseTimerIntentHandler()) intent.async_register(hass, UnpauseTimerIntentHandler()) intent.async_register(hass, TimerStatusIntentHandler()) + intent.async_register(hass, GetCurrentDateIntentHandler()) + intent.async_register(hass, GetCurrentTimeIntentHandler()) return True @@ -370,6 +373,30 @@ class SetPositionIntentHandler(intent.DynamicServiceIntentHandler): raise intent.IntentHandleError(f"Domain not supported: {state.domain}") +class GetCurrentDateIntentHandler(intent.IntentHandler): + """Gets the current date.""" + + intent_type = intent.INTENT_GET_CURRENT_DATE + description = "Gets the current date" + + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: + response = intent_obj.create_response() + response.async_set_speech_slots({"date": datetime.now().date()}) + return response + + +class GetCurrentTimeIntentHandler(intent.IntentHandler): + """Gets the current time.""" + + intent_type = intent.INTENT_GET_CURRENT_TIME + description = "Gets the current time" + + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: + response = intent_obj.create_response() + response.async_set_speech_slots({"time": datetime.now().time()}) + return response + + async def _async_process_intent( hass: HomeAssistant, domain: str, platform: IntentPlatformProtocol ) -> None: diff --git a/homeassistant/components/israel_rail/__init__.py b/homeassistant/components/israel_rail/__init__.py new file mode 100644 index 00000000000..3c33a159a63 --- /dev/null +++ b/homeassistant/components/israel_rail/__init__.py @@ -0,0 +1,58 @@ +"""The Israel Rail component.""" + +import logging + +from israelrailapi import TrainSchedule + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady + +from .const import CONF_DESTINATION, CONF_START, DOMAIN +from .coordinator import IsraelRailDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + + +PLATFORMS: list[Platform] = [Platform.SENSOR] + + +type IsraelRailConfigEntry = ConfigEntry[IsraelRailDataUpdateCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: IsraelRailConfigEntry) -> bool: + """Set up Israel rail from a config entry.""" + config = entry.data + + start = config[CONF_START] + destination = config[CONF_DESTINATION] + + train_schedule = TrainSchedule() + + try: + await hass.async_add_executor_job(train_schedule.query, start, destination) + except Exception as e: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="request_timeout", + translation_placeholders={ + "config_title": entry.title, + "error": str(e), + }, + ) from e + + israel_rail_coordinator = IsraelRailDataUpdateCoordinator( + hass, train_schedule, start, destination + ) + await israel_rail_coordinator.async_config_entry_first_refresh() + entry.runtime_data = israel_rail_coordinator + + await 
hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: IsraelRailConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/israel_rail/config_flow.py b/homeassistant/components/israel_rail/config_flow.py new file mode 100644 index 00000000000..3adecaf428c --- /dev/null +++ b/homeassistant/components/israel_rail/config_flow.py @@ -0,0 +1,61 @@ +"""Config flow for israel rail.""" + +import logging +from typing import Any + +from israelrailapi import TrainSchedule +from israelrailapi.stations import STATIONS +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult + +from .const import CONF_DESTINATION, CONF_START, DOMAIN + +STATIONS_NAMES = [station["Heb"] for station in STATIONS.values()] + +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_START): vol.In(STATIONS_NAMES), + vol.Required(CONF_DESTINATION): vol.In(STATIONS_NAMES), + } +) + +_LOGGER = logging.getLogger(__name__) + + +class IsraelRailConfigFlow(ConfigFlow, domain=DOMAIN): + """Israel rail config flow.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Async user step to set up the connection.""" + errors = {} + if user_input: + train_schedule = TrainSchedule() + try: + await self.hass.async_add_executor_job( + train_schedule.query, + user_input[CONF_START], + user_input[CONF_DESTINATION], + ) + except Exception: + _LOGGER.exception("Unknown error") + errors["base"] = "unknown" + if not errors: + unique_id = f"{user_input[CONF_START]} {user_input[CONF_DESTINATION]}" + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=unique_id, + data=user_input, + ) + + return self.async_show_form( + step_id="user", + data_schema=DATA_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/israel_rail/const.py b/homeassistant/components/israel_rail/const.py new file mode 100644 index 00000000000..bb9c7534638 --- /dev/null +++ b/homeassistant/components/israel_rail/const.py @@ -0,0 +1,17 @@ +"""Constants for the israel rail integration.""" + +from datetime import timedelta +from typing import Final + +DOMAIN = "israel_rail" + +CONF_START: Final = "from" +CONF_DESTINATION: Final = "to" + +DEFAULT_NAME = "Next Destination" + +DEPARTURES_COUNT = 3 + +DEFAULT_SCAN_INTERVAL = timedelta(seconds=90) + +ATTRIBUTION = "Data provided by Israel rail." 
diff --git a/homeassistant/components/israel_rail/coordinator.py b/homeassistant/components/israel_rail/coordinator.py new file mode 100644 index 00000000000..d707f8c5ea6 --- /dev/null +++ b/homeassistant/components/israel_rail/coordinator.py @@ -0,0 +1,89 @@ +"""DataUpdateCoordinator for the israel rail integration.""" + +from __future__ import annotations + +from dataclasses import dataclass +from datetime import datetime +import logging + +from israelrailapi import TrainSchedule +from israelrailapi.api import TrainRoute +from israelrailapi.train_station import station_name_to_id + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +import homeassistant.util.dt as dt_util + +from .const import DEFAULT_SCAN_INTERVAL, DEPARTURES_COUNT, DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class DataConnection: + """A connection data class.""" + + departure: datetime | None + platform: str + start: str + destination: str + train_number: str + trains: int + + +def departure_time(train_route: TrainRoute) -> datetime | None: + """Get departure time.""" + start_datetime = dt_util.parse_datetime(train_route.start_time) + return start_datetime.astimezone() if start_datetime else None + + +class IsraelRailDataUpdateCoordinator(DataUpdateCoordinator[list[DataConnection]]): + """An IsraelRail Data Update Coordinator.""" + + config_entry: ConfigEntry + + def __init__( + self, + hass: HomeAssistant, + train_schedule: TrainSchedule, + start: str, + destination: str, + ) -> None: + """Initialize the IsraelRail data coordinator.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=DEFAULT_SCAN_INTERVAL, + ) + self._train_schedule = train_schedule + self._start = start + self._destination = destination + + async def _async_update_data(self) -> list[DataConnection]: + try: + train_routes = await self.hass.async_add_executor_job( + self._train_schedule.query, + self._start, + self._destination, + datetime.now().strftime("%Y-%m-%d"), + datetime.now().strftime("%H:%M"), + ) + except Exception as e: + raise UpdateFailed( + "Unable to connect and retrieve data from israelrail api", + ) from e + + return [ + DataConnection( + departure=departure_time(train_routes[i]), + train_number=train_routes[i].trains[0].data["trainNumber"], + platform=train_routes[i].trains[0].platform, + trains=len(train_routes[i].trains), + start=station_name_to_id(train_routes[i].trains[0].src), + destination=station_name_to_id(train_routes[i].trains[-1].dst), + ) + for i in range(DEPARTURES_COUNT) + if len(train_routes) > i and train_routes[i] is not None + ] diff --git a/homeassistant/components/israel_rail/icons.json b/homeassistant/components/israel_rail/icons.json new file mode 100644 index 00000000000..39f8f24c77b --- /dev/null +++ b/homeassistant/components/israel_rail/icons.json @@ -0,0 +1,27 @@ +{ + "entity": { + "sensor": { + "departure0": { + "default": "mdi:bus-clock" + }, + "departure1": { + "default": "mdi:bus-clock" + }, + "departure2": { + "default": "mdi:bus-clock" + }, + "duration": { + "default": "mdi:timeline-clock" + }, + "trains": { + "default": "mdi:train" + }, + "platform": { + "default": "mdi:bus-stop-uncovered" + }, + "train_number": { + "default": "mdi:numeric" + } + } + } +} diff --git a/homeassistant/components/israel_rail/manifest.json b/homeassistant/components/israel_rail/manifest.json new file mode 100644 index 00000000000..afe085f5729 
--- /dev/null +++ b/homeassistant/components/israel_rail/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "israel_rail", + "name": "Israel Railways", + "codeowners": ["@shaiu"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/israel_rail", + "iot_class": "cloud_polling", + "loggers": ["israelrailapi"], + "requirements": ["israel-rail-api==0.1.2"] +} diff --git a/homeassistant/components/israel_rail/sensor.py b/homeassistant/components/israel_rail/sensor.py new file mode 100644 index 00000000000..132a9a74826 --- /dev/null +++ b/homeassistant/components/israel_rail/sensor.py @@ -0,0 +1,118 @@ +"""Support for israel rail.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime +import logging +from typing import TYPE_CHECKING + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . import IsraelRailConfigEntry +from .const import ATTRIBUTION, DEPARTURES_COUNT, DOMAIN +from .coordinator import DataConnection, IsraelRailDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(kw_only=True, frozen=True) +class IsraelRailSensorEntityDescription(SensorEntityDescription): + """Describes israel rail sensor entity.""" + + value_fn: Callable[[DataConnection], StateType | datetime] + + index: int = 0 + + +DEPARTURE_SENSORS: tuple[IsraelRailSensorEntityDescription, ...] = ( + *[ + IsraelRailSensorEntityDescription( + key=f"departure{i or ''}", + translation_key=f"departure{i}", + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda data_connection: data_connection.departure, + index=i, + ) + for i in range(DEPARTURES_COUNT) + ], +) + +SENSORS: tuple[IsraelRailSensorEntityDescription, ...] 
= ( + IsraelRailSensorEntityDescription( + key="platform", + translation_key="platform", + value_fn=lambda data_connection: data_connection.platform, + ), + IsraelRailSensorEntityDescription( + key="trains", + translation_key="trains", + value_fn=lambda data_connection: data_connection.trains, + ), + IsraelRailSensorEntityDescription( + key="train_number", + translation_key="train_number", + value_fn=lambda data_connection: data_connection.train_number, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: IsraelRailConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the sensor from a config entry created in the integrations UI.""" + coordinator = config_entry.runtime_data + + unique_id = config_entry.unique_id + + if TYPE_CHECKING: + assert unique_id + + async_add_entities( + IsraelRailEntitySensor(coordinator, description, unique_id) + for description in (*DEPARTURE_SENSORS, *SENSORS) + ) + + +class IsraelRailEntitySensor( + CoordinatorEntity[IsraelRailDataUpdateCoordinator], SensorEntity +): + """Define an Israel Rail sensor.""" + + entity_description: IsraelRailSensorEntityDescription + _attr_attribution = ATTRIBUTION + _attr_has_entity_name = True + + def __init__( + self, + coordinator: IsraelRailDataUpdateCoordinator, + entity_description: IsraelRailSensorEntityDescription, + unique_id: str, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, unique_id)}, + entry_type=DeviceEntryType.SERVICE, + ) + self._attr_unique_id = f"{unique_id}_{entity_description.key}" + + @property + def native_value(self) -> StateType | datetime: + """Return the state of the sensor.""" + return self.entity_description.value_fn( + self.coordinator.data[self.entity_description.index] + ) diff --git a/homeassistant/components/israel_rail/strings.json b/homeassistant/components/israel_rail/strings.json new file mode 100644 index 00000000000..f42cf765e22 --- /dev/null +++ b/homeassistant/components/israel_rail/strings.json @@ -0,0 +1,42 @@ +{ + "config": { + "error": { + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, + "step": { + "user": { + "data": { + "from": "Start station", + "to": "End station" + }, + "description": "Provide start and end station for your connection from the provided list", + "title": "Israel Rail" + } + } + }, + "entity": { + "sensor": { + "departure0": { + "name": "Departure" + }, + "departure1": { + "name": "Departure +1" + }, + "departure2": { + "name": "Departure +2" + }, + "trains": { + "name": "Trains" + }, + "platform": { + "name": "Platform" + }, + "train_number": { + "name": "Train number" + } + } + } +} diff --git a/homeassistant/components/jellyfin/config_flow.py b/homeassistant/components/jellyfin/config_flow.py index 4798a07b9cd..baecbcfb941 100644 --- a/homeassistant/components/jellyfin/config_flow.py +++ b/homeassistant/components/jellyfin/config_flow.py @@ -97,7 +97,11 @@ class JellyfinConfigFlow(ConfigFlow, domain=DOMAIN): ) return self.async_show_form( - step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + step_id="user", + data_schema=self.add_suggested_values_to_schema( + STEP_USER_DATA_SCHEMA, user_input + ), + errors=errors, ) async def async_step_reauth( diff --git a/homeassistant/components/knocki/config_flow.py 
b/homeassistant/components/knocki/config_flow.py index 724c65f83df..654dd4a4d1f 100644 --- a/homeassistant/components/knocki/config_flow.py +++ b/homeassistant/components/knocki/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from knocki import KnockiClient, KnockiConnectionError +from knocki import KnockiClient, KnockiConnectionError, KnockiInvalidAuthError import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -45,6 +45,8 @@ class KnockiConfigFlow(ConfigFlow, domain=DOMAIN): raise except KnockiConnectionError: errors["base"] = "cannot_connect" + except KnockiInvalidAuthError: + errors["base"] = "invalid_auth" except Exception: # noqa: BLE001 LOGGER.exception("Error logging into the Knocki API") errors["base"] = "unknown" diff --git a/homeassistant/components/knocki/manifest.json b/homeassistant/components/knocki/manifest.json index e78e9856d62..f35827b8213 100644 --- a/homeassistant/components/knocki/manifest.json +++ b/homeassistant/components/knocki/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "cloud_push", "loggers": ["knocki"], - "requirements": ["knocki==0.2.0"] + "requirements": ["knocki==0.3.1"] } diff --git a/homeassistant/components/lifx/const.py b/homeassistant/components/lifx/const.py index 2208537b591..9b213cc9f6d 100644 --- a/homeassistant/components/lifx/const.py +++ b/homeassistant/components/lifx/const.py @@ -61,5 +61,6 @@ INFRARED_BRIGHTNESS_VALUES_MAP = { } DATA_LIFX_MANAGER = "lifx_manager" +LIFX_CEILING_PRODUCT_IDS = {176, 177} _LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/lifx/coordinator.py b/homeassistant/components/lifx/coordinator.py index 63912cbb820..9d5532aeeb2 100644 --- a/homeassistant/components/lifx/coordinator.py +++ b/homeassistant/components/lifx/coordinator.py @@ -6,7 +6,7 @@ import asyncio from collections.abc import Callable from datetime import timedelta from enum import IntEnum -from functools import partial +from functools import cached_property, partial from math import floor, log10 from typing import Any, cast @@ -15,6 +15,7 @@ from aiolifx.aiolifx import ( Message, MultiZoneDirection, MultiZoneEffectType, + TileEffectSkyType, TileEffectType, ) from aiolifx.connection import LIFXConnection @@ -70,9 +71,18 @@ class FirmwareEffect(IntEnum): MOVE = 1 MORPH = 2 FLAME = 3 + SKY = 5 -class LIFXUpdateCoordinator(DataUpdateCoordinator[None]): +class SkyType(IntEnum): + """Enumeration of sky types for SKY firmware effect.""" + + SUNRISE = 0 + SUNSET = 1 + CLOUDS = 2 + + +class LIFXUpdateCoordinator(DataUpdateCoordinator[None]): # noqa: PLR0904 """DataUpdateCoordinator to gather data for a specific lifx device.""" def __init__( @@ -128,14 +138,14 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator[None]): """Return the current infrared brightness as a string.""" return infrared_brightness_value_to_option(self.device.infrared_brightness) - @property + @cached_property def serial_number(self) -> str: """Return the internal mac address.""" return cast( str, self.device.mac_addr ) # device.mac_addr is not the mac_address, its the serial number - @property + @cached_property def mac_address(self) -> str: """Return the physical mac address.""" return get_real_mac_addr( @@ -149,6 +159,23 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator[None]): """Return the label of the bulb.""" return cast(str, self.device.label) + @cached_property + def is_extended_multizone(self) -> bool: + """Return true if this is a multizone 
device.""" + return bool(lifx_features(self.device)["extended_multizone"]) + + @cached_property + def is_legacy_multizone(self) -> bool: + """Return true if this is a legacy multizone device.""" + return bool( + lifx_features(self.device)["multizone"] and not self.is_extended_multizone + ) + + @cached_property + def is_matrix(self) -> bool: + """Return true if this is a matrix device.""" + return bool(lifx_features(self.device)["matrix"]) + async def diagnostics(self) -> dict[str, Any]: """Return diagnostic information about the device.""" features = lifx_features(self.device) @@ -269,17 +296,23 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator[None]): num_zones = self.get_number_of_zones() features = lifx_features(self.device) - is_extended_multizone = features["extended_multizone"] - is_legacy_multizone = not is_extended_multizone and features["multizone"] update_rssi = self._update_rssi methods: list[Callable] = [self.device.get_color] if update_rssi: methods.append(self.device.get_wifiinfo) - if is_extended_multizone: + if self.is_matrix: + methods.extend( + [ + self.device.get_tile_effect, + self.device.get_device_chain, + self.device.get64, + ] + ) + if self.is_extended_multizone: methods.append(self.device.get_extended_color_zones) - elif is_legacy_multizone: + elif self.is_legacy_multizone: methods.extend(self._async_build_color_zones_update_requests()) - if is_extended_multizone or is_legacy_multizone: + if self.is_extended_multizone or self.is_legacy_multizone: methods.append(self.device.get_multizone_effect) if features["hev"]: methods.append(self.device.get_hev_cycle) @@ -297,9 +330,9 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator[None]): # We always send the rssi request second self._rssi = int(floor(10 * log10(responses[1].signal) + 0.5)) - if is_extended_multizone or is_legacy_multizone: + if self.is_matrix or self.is_extended_multizone or self.is_legacy_multizone: self.active_effect = FirmwareEffect[self.device.effect.get("effect", "OFF")] - if is_legacy_multizone and num_zones != self.get_number_of_zones(): + if self.is_legacy_multizone and num_zones != self.get_number_of_zones(): # The number of zones has changed so we need # to update the zones again. This happens rarely. 
await self.async_get_color_zones() @@ -402,7 +435,7 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator[None]): power_on: bool = True, ) -> None: """Control the firmware-based Move effect on a multizone device.""" - if lifx_features(self.device)["multizone"] is True: + if self.is_extended_multizone or self.is_legacy_multizone: if power_on and self.device.power_level == 0: await self.async_set_power(True, 0) @@ -422,27 +455,36 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator[None]): ) self.active_effect = FirmwareEffect[effect.upper()] - async def async_set_matrix_effect( + async def async_set_matrix_effect( # noqa: PLR0917 self, effect: str, palette: list[tuple[int, int, int, int]] | None = None, - speed: float = 3, + speed: float | None = None, power_on: bool = True, + sky_type: str | None = None, + cloud_saturation_min: int | None = None, + cloud_saturation_max: int | None = None, ) -> None: """Control the firmware-based effects on a matrix device.""" - if lifx_features(self.device)["matrix"] is True: + if self.is_matrix: if power_on and self.device.power_level == 0: await self.async_set_power(True, 0) if palette is None: palette = [] + if sky_type is not None: + sky_type = TileEffectSkyType[sky_type.upper()].value + await async_execute_lifx( partial( self.device.set_tile_effect, effect=TileEffectType[effect.upper()].value, speed=speed, palette=palette, + sky_type=sky_type, + cloud_saturation_min=cloud_saturation_min, + cloud_saturation_max=cloud_saturation_max, ) ) self.active_effect = FirmwareEffect[effect.upper()] diff --git a/homeassistant/components/lifx/icons.json b/homeassistant/components/lifx/icons.json index bf9e5e732d5..e32fdb5e06b 100644 --- a/homeassistant/components/lifx/icons.json +++ b/homeassistant/components/lifx/icons.json @@ -7,6 +7,7 @@ "effect_move": "mdi:cube-send", "effect_flame": "mdi:fire", "effect_morph": "mdi:shape-outline", + "effect_sky": "mdi:clouds", "effect_stop": "mdi:stop" } } diff --git a/homeassistant/components/lifx/light.py b/homeassistant/components/lifx/light.py index caa1140b099..22bcef4915e 100644 --- a/homeassistant/components/lifx/light.py +++ b/homeassistant/components/lifx/light.py @@ -36,6 +36,7 @@ from .const import ( DATA_LIFX_MANAGER, DOMAIN, INFRARED_BRIGHTNESS, + LIFX_CEILING_PRODUCT_IDS, ) from .coordinator import FirmwareEffect, LIFXUpdateCoordinator from .entity import LIFXEntity @@ -45,6 +46,7 @@ from .manager import ( SERVICE_EFFECT_MORPH, SERVICE_EFFECT_MOVE, SERVICE_EFFECT_PULSE, + SERVICE_EFFECT_SKY, SERVICE_EFFECT_STOP, LIFXManager, ) @@ -97,7 +99,10 @@ async def async_setup_entry( "set_hev_cycle_state", ) if lifx_features(device)["matrix"]: - entity: LIFXLight = LIFXMatrix(coordinator, manager, entry) + if device.product in LIFX_CEILING_PRODUCT_IDS: + entity: LIFXLight = LIFXCeiling(coordinator, manager, entry) + else: + entity = LIFXMatrix(coordinator, manager, entry) elif lifx_features(device)["extended_multizone"]: entity = LIFXExtendedMultiZone(coordinator, manager, entry) elif lifx_features(device)["multizone"]: @@ -499,3 +504,16 @@ class LIFXMatrix(LIFXColor): SERVICE_EFFECT_MORPH, SERVICE_EFFECT_STOP, ] + + +class LIFXCeiling(LIFXMatrix): + """Representation of a LIFX Ceiling device.""" + + _attr_effect_list = [ + SERVICE_EFFECT_COLORLOOP, + SERVICE_EFFECT_FLAME, + SERVICE_EFFECT_PULSE, + SERVICE_EFFECT_MORPH, + SERVICE_EFFECT_SKY, + SERVICE_EFFECT_STOP, + ] diff --git a/homeassistant/components/lifx/manager.py b/homeassistant/components/lifx/manager.py index 038fdceab26..c23837c5fcc 100644 --- 
a/homeassistant/components/lifx/manager.py +++ b/homeassistant/components/lifx/manager.py @@ -41,9 +41,12 @@ SERVICE_EFFECT_FLAME = "effect_flame" SERVICE_EFFECT_MORPH = "effect_morph" SERVICE_EFFECT_MOVE = "effect_move" SERVICE_EFFECT_PULSE = "effect_pulse" +SERVICE_EFFECT_SKY = "effect_sky" SERVICE_EFFECT_STOP = "effect_stop" ATTR_CHANGE = "change" +ATTR_CLOUD_SATURATION_MIN = "cloud_saturation_min" +ATTR_CLOUD_SATURATION_MAX = "cloud_saturation_max" ATTR_CYCLES = "cycles" ATTR_DIRECTION = "direction" ATTR_PALETTE = "palette" @@ -52,6 +55,7 @@ ATTR_POWER_OFF = "power_off" ATTR_POWER_ON = "power_on" ATTR_SATURATION_MAX = "saturation_max" ATTR_SATURATION_MIN = "saturation_min" +ATTR_SKY_TYPE = "sky_type" ATTR_SPEED = "speed" ATTR_SPREAD = "spread" @@ -59,6 +63,7 @@ EFFECT_FLAME = "FLAME" EFFECT_MORPH = "MORPH" EFFECT_MOVE = "MOVE" EFFECT_OFF = "OFF" +EFFECT_SKY = "SKY" EFFECT_FLAME_DEFAULT_SPEED = 3 @@ -72,6 +77,13 @@ EFFECT_MOVE_DIRECTION_LEFT = "left" EFFECT_MOVE_DIRECTIONS = [EFFECT_MOVE_DIRECTION_LEFT, EFFECT_MOVE_DIRECTION_RIGHT] +EFFECT_SKY_DEFAULT_SPEED = 50 +EFFECT_SKY_DEFAULT_SKY_TYPE = "Clouds" +EFFECT_SKY_DEFAULT_CLOUD_SATURATION_MIN = 50 +EFFECT_SKY_DEFAULT_CLOUD_SATURATION_MAX = 180 + +EFFECT_SKY_SKY_TYPES = ["Sunrise", "Sunset", "Clouds"] + PULSE_MODE_BLINK = "blink" PULSE_MODE_BREATHE = "breathe" PULSE_MODE_PING = "ping" @@ -137,13 +149,6 @@ LIFX_EFFECT_COLORLOOP_SCHEMA = cv.make_entity_service_schema( LIFX_EFFECT_STOP_SCHEMA = cv.make_entity_service_schema({}) -SERVICES = ( - SERVICE_EFFECT_STOP, - SERVICE_EFFECT_PULSE, - SERVICE_EFFECT_MOVE, - SERVICE_EFFECT_COLORLOOP, -) - LIFX_EFFECT_FLAME_SCHEMA = cv.make_entity_service_schema( { **LIFX_EFFECT_SCHEMA, @@ -185,6 +190,28 @@ LIFX_EFFECT_MOVE_SCHEMA = cv.make_entity_service_schema( } ) +LIFX_EFFECT_SKY_SCHEMA = cv.make_entity_service_schema( + { + **LIFX_EFFECT_SCHEMA, + ATTR_SPEED: vol.All(vol.Coerce(int), vol.Clamp(min=1, max=86400)), + ATTR_SKY_TYPE: vol.In(EFFECT_SKY_SKY_TYPES), + ATTR_CLOUD_SATURATION_MIN: vol.All(vol.Coerce(int), vol.Clamp(min=0, max=255)), + ATTR_CLOUD_SATURATION_MAX: vol.All(vol.Coerce(int), vol.Clamp(min=0, max=255)), + ATTR_PALETTE: vol.All(cv.ensure_list, [HSBK_SCHEMA]), + } +) + + +SERVICES = ( + SERVICE_EFFECT_COLORLOOP, + SERVICE_EFFECT_FLAME, + SERVICE_EFFECT_MORPH, + SERVICE_EFFECT_MOVE, + SERVICE_EFFECT_PULSE, + SERVICE_EFFECT_SKY, + SERVICE_EFFECT_STOP, +) + class LIFXManager: """Representation of all known LIFX entities.""" @@ -261,6 +288,13 @@ class LIFXManager: schema=LIFX_EFFECT_MOVE_SCHEMA, ) + self.hass.services.async_register( + DOMAIN, + SERVICE_EFFECT_SKY, + service_handler, + schema=LIFX_EFFECT_SKY_SCHEMA, + ) + self.hass.services.async_register( DOMAIN, SERVICE_EFFECT_STOP, @@ -375,6 +409,39 @@ class LIFXManager: ) await self.effects_conductor.start(effect, bulbs) + elif service == SERVICE_EFFECT_SKY: + palette = kwargs.get(ATTR_PALETTE, None) + if palette is not None: + theme = Theme() + for hsbk in palette: + theme.add_hsbk(hsbk[0], hsbk[1], hsbk[2], hsbk[3]) + + speed = kwargs.get(ATTR_SPEED, EFFECT_SKY_DEFAULT_SPEED) + sky_type = kwargs.get(ATTR_SKY_TYPE, EFFECT_SKY_DEFAULT_SKY_TYPE) + + cloud_saturation_min = kwargs.get( + ATTR_CLOUD_SATURATION_MIN, + EFFECT_SKY_DEFAULT_CLOUD_SATURATION_MIN, + ) + cloud_saturation_max = kwargs.get( + ATTR_CLOUD_SATURATION_MAX, + EFFECT_SKY_DEFAULT_CLOUD_SATURATION_MAX, + ) + + await asyncio.gather( + *( + coordinator.async_set_matrix_effect( + effect=EFFECT_SKY, + speed=speed, + sky_type=sky_type, + 
cloud_saturation_min=cloud_saturation_min, + cloud_saturation_max=cloud_saturation_max, + palette=theme.colors, + ) + for coordinator in coordinators + ) + ) + elif service == SERVICE_EFFECT_STOP: await self.effects_conductor.stop(bulbs) diff --git a/homeassistant/components/lifx/manifest.json b/homeassistant/components/lifx/manifest.json index 5e68c1bab35..3d0bd1d73d1 100644 --- a/homeassistant/components/lifx/manifest.json +++ b/homeassistant/components/lifx/manifest.json @@ -48,7 +48,7 @@ "iot_class": "local_polling", "loggers": ["aiolifx", "aiolifx_effects", "bitstring"], "requirements": [ - "aiolifx==1.0.4", + "aiolifx==1.0.5", "aiolifx-effects==0.3.2", "aiolifx-themes==0.4.15" ] diff --git a/homeassistant/components/lifx/services.yaml b/homeassistant/components/lifx/services.yaml index 83d31439666..c2eb2e249cb 100644 --- a/homeassistant/components/lifx/services.yaml +++ b/homeassistant/components/lifx/services.yaml @@ -281,6 +281,58 @@ effect_morph: default: true selector: boolean: +effect_sky: + target: + entity: + integration: lifx + domain: light + fields: + power_on: + default: true + selector: + boolean: + speed: + default: 50 + example: 50 + selector: + number: + min: 1 + max: 86400 + step: 1 + unit_of_measurement: seconds + sky_type: + default: "Clouds" + example: "Clouds" + selector: + select: + options: + - "Clouds" + - "Sunrise" + - "Sunset" + cloud_saturation_min: + default: 50 + example: 50 + selector: + number: + min: 0 + max: 255 + cloud_saturation_max: + default: 180 + example: 180 + selector: + number: + min: 0 + max: 255 + palette: + example: + - "[200, 1, 1, 3500]" + - "[241, 1, 0.01, 3500]" + - "[189, 1, 0.08, 3500]" + - "[40, 1, 1, 3500]" + - "[40, 0.5, 1, 3500]" + - "[40, 0, 1, 6500]" + selector: + object: effect_stop: target: entity: diff --git a/homeassistant/components/lifx/strings.json b/homeassistant/components/lifx/strings.json index 21f3b3fe52b..68f9e31aabd 100644 --- a/homeassistant/components/lifx/strings.json +++ b/homeassistant/components/lifx/strings.json @@ -220,6 +220,36 @@ } } }, + "effect_sky": { + "name": "Sky effect", + "description": "Starts the firmware-based Sky effect on LIFX Ceiling.", + "fields": { + "speed": { + "name": "Speed", + "description": "How long the Sunrise and Sunset sky types will take to complete. For the Cloud sky type, it is the speed of the clouds across the device." + }, + "sky_type": { + "name": "Sky type", + "description": "The style of sky that will be animated by the effect." + }, + "cloud_saturation_min": { + "name": "Cloud saturation Minimum", + "description": "Minimum cloud saturation." + }, + "cloud_saturation_max": { + "name": "Cloud Saturation maximum", + "description": "Maximum cloud saturation." + }, + "palette": { + "name": "Palette", + "description": "List of 1 to 6 colors as hue (0-360), saturation (0-100), brightness (0-100) and kelvin (1500-9000) values to use for this effect." + }, + "power_on": { + "name": "Power on", + "description": "[%key:component::lifx::services::effect_move::fields::power_on::description%]" + } + } + }, "effect_stop": { "name": "Stop effect", "description": "Stops a running effect." 
diff --git a/homeassistant/components/light/__init__.py b/homeassistant/components/light/__init__.py index 077071e6735..445096ae643 100644 --- a/homeassistant/components/light/__init__.py +++ b/homeassistant/components/light/__init__.py @@ -864,6 +864,16 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ATTR_MAX_MIREDS, ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_BRIGHTNESS, + ATTR_COLOR_MODE, + ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, + ATTR_EFFECT, + ATTR_HS_COLOR, + ATTR_RGB_COLOR, + ATTR_RGBW_COLOR, + ATTR_RGBWW_COLOR, + ATTR_XY_COLOR, } ) diff --git a/homeassistant/components/local_file/strings.json b/homeassistant/components/local_file/strings.json index 3f977fc941e..0db5d709c69 100644 --- a/homeassistant/components/local_file/strings.json +++ b/homeassistant/components/local_file/strings.json @@ -2,7 +2,7 @@ "services": { "update_file_path": { "name": "Updates file path", - "description": "Use this service to change the file displayed by the camera.", + "description": "Use this action to change the file displayed by the camera.", "fields": { "entity_id": { "name": "Entity", diff --git a/homeassistant/components/lutron_caseta/__init__.py b/homeassistant/components/lutron_caseta/__init__.py index f6fed0688c4..178acea83f0 100644 --- a/homeassistant/components/lutron_caseta/__init__.py +++ b/homeassistant/components/lutron_caseta/__init__.py @@ -63,6 +63,7 @@ from .models import ( LUTRON_KEYPAD_SERIAL, LUTRON_KEYPAD_TYPE, LutronButton, + LutronCasetaConfigEntry, LutronCasetaData, LutronKeypad, LutronKeypadData, @@ -103,8 +104,6 @@ PLATFORMS = [ async def async_setup(hass: HomeAssistant, base_config: ConfigType) -> bool: """Set up the Lutron component.""" - hass.data.setdefault(DOMAIN, {}) - if DOMAIN in base_config: bridge_configs = base_config[DOMAIN] for config in bridge_configs: @@ -126,7 +125,7 @@ async def async_setup(hass: HomeAssistant, base_config: ConfigType) -> bool: async def _async_migrate_unique_ids( - hass: HomeAssistant, entry: config_entries.ConfigEntry + hass: HomeAssistant, entry: LutronCasetaConfigEntry ) -> None: """Migrate entities since the occupancygroup were not actually unique.""" @@ -153,14 +152,14 @@ async def _async_migrate_unique_ids( async def async_setup_entry( - hass: HomeAssistant, config_entry: config_entries.ConfigEntry + hass: HomeAssistant, entry: LutronCasetaConfigEntry ) -> bool: """Set up a bridge from a config entry.""" - entry_id = config_entry.entry_id - host = config_entry.data[CONF_HOST] - keyfile = hass.config.path(config_entry.data[CONF_KEYFILE]) - certfile = hass.config.path(config_entry.data[CONF_CERTFILE]) - ca_certs = hass.config.path(config_entry.data[CONF_CA_CERTS]) + entry_id = entry.entry_id + host = entry.data[CONF_HOST] + keyfile = hass.config.path(entry.data[CONF_KEYFILE]) + certfile = hass.config.path(entry.data[CONF_CERTFILE]) + ca_certs = hass.config.path(entry.data[CONF_CA_CERTS]) bridge = None try: @@ -185,14 +184,14 @@ async def async_setup_entry( raise ConfigEntryNotReady(f"Cannot connect to {host}") _LOGGER.debug("Connected to Lutron Caseta bridge via LEAP at %s", host) - await _async_migrate_unique_ids(hass, config_entry) + await _async_migrate_unique_ids(hass, entry) bridge_devices = bridge.get_devices() bridge_device = bridge_devices[BRIDGE_DEVICE_ID] - if not config_entry.unique_id: + if not entry.unique_id: hass.config_entries.async_update_entry( - config_entry, unique_id=serial_to_unique_id(bridge_device["serial"]) + entry, 
unique_id=serial_to_unique_id(bridge_device["serial"]) ) _async_register_bridge_device(hass, entry_id, bridge_device, bridge) @@ -202,13 +201,9 @@ async def async_setup_entry( # Store this bridge (keyed by entry_id) so it can be retrieved by the # platforms we're setting up. - hass.data[DOMAIN][entry_id] = LutronCasetaData( - bridge, - bridge_device, - keypad_data, - ) + entry.runtime_data = LutronCasetaData(bridge, bridge_device, keypad_data) - await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True @@ -497,14 +492,12 @@ def _async_subscribe_keypad_events( async def async_unload_entry( - hass: HomeAssistant, entry: config_entries.ConfigEntry + hass: HomeAssistant, entry: LutronCasetaConfigEntry ) -> bool: """Unload the bridge from a config entry.""" - data: LutronCasetaData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data await data.bridge.close() - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) class LutronCasetaDevice(Entity): @@ -605,10 +598,10 @@ def _id_to_identifier(lutron_id: str) -> tuple[str, str]: async def async_remove_config_entry_device( - hass: HomeAssistant, entry: config_entries.ConfigEntry, device_entry: dr.DeviceEntry + hass: HomeAssistant, entry: LutronCasetaConfigEntry, device_entry: dr.DeviceEntry ) -> bool: """Remove lutron_caseta config entry from a device.""" - data: LutronCasetaData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data bridge = data.bridge devices = bridge.get_devices() buttons = bridge.buttons diff --git a/homeassistant/components/lutron_caseta/binary_sensor.py b/homeassistant/components/lutron_caseta/binary_sensor.py index 73d468a88f2..bfed8c785ae 100644 --- a/homeassistant/components/lutron_caseta/binary_sensor.py +++ b/homeassistant/components/lutron_caseta/binary_sensor.py @@ -6,7 +6,6 @@ from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_SUGGESTED_AREA from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo @@ -14,12 +13,12 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import DOMAIN as CASETA_DOMAIN, LutronCasetaDevice, _area_name_from_id from .const import CONFIG_URL, MANUFACTURER, UNASSIGNED_AREA -from .models import LutronCasetaData +from .models import LutronCasetaConfigEntry async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: LutronCasetaConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Lutron Caseta binary_sensor platform. @@ -27,7 +26,7 @@ async def async_setup_entry( Adds occupancy groups from the Caseta bridge associated with the config_entry as binary_sensor entities. 
""" - data: LutronCasetaData = hass.data[CASETA_DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data bridge = data.bridge occupancy_groups = bridge.occupancy_groups async_add_entities( diff --git a/homeassistant/components/lutron_caseta/button.py b/homeassistant/components/lutron_caseta/button.py index a1ed43a8b03..d2651673c4c 100644 --- a/homeassistant/components/lutron_caseta/button.py +++ b/homeassistant/components/lutron_caseta/button.py @@ -5,24 +5,22 @@ from __future__ import annotations from typing import Any from homeassistant.components.button import ButtonEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import LutronCasetaDevice -from .const import DOMAIN as CASETA_DOMAIN from .device_trigger import LEAP_TO_DEVICE_TYPE_SUBTYPE_MAP -from .models import LutronCasetaData +from .models import LutronCasetaConfigEntry, LutronCasetaData async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: LutronCasetaConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Lutron pico and keypad buttons.""" - data: LutronCasetaData = hass.data[CASETA_DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data bridge = data.bridge button_devices = bridge.get_buttons() all_devices = data.bridge.get_devices() diff --git a/homeassistant/components/lutron_caseta/cover.py b/homeassistant/components/lutron_caseta/cover.py index 04fbb9e54c1..3edb62c0d98 100644 --- a/homeassistant/components/lutron_caseta/cover.py +++ b/homeassistant/components/lutron_caseta/cover.py @@ -10,13 +10,11 @@ from homeassistant.components.cover import ( CoverEntity, CoverEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import LutronCasetaDeviceUpdatableEntity -from .const import DOMAIN as CASETA_DOMAIN -from .models import LutronCasetaData +from .models import LutronCasetaConfigEntry class LutronCasetaShade(LutronCasetaDeviceUpdatableEntity, CoverEntity): @@ -114,7 +112,7 @@ PYLUTRON_TYPE_TO_CLASSES = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: LutronCasetaConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Lutron Caseta cover platform. @@ -122,7 +120,7 @@ async def async_setup_entry( Adds shades from the Caseta bridge associated with the config_entry as cover entities. 
""" - data: LutronCasetaData = hass.data[CASETA_DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data bridge = data.bridge cover_devices = bridge.get_devices_by_domain(DOMAIN) async_add_entities( diff --git a/homeassistant/components/lutron_caseta/device_trigger.py b/homeassistant/components/lutron_caseta/device_trigger.py index 86b82e64127..0b432f88045 100644 --- a/homeassistant/components/lutron_caseta/device_trigger.py +++ b/homeassistant/components/lutron_caseta/device_trigger.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import cast import voluptuous as vol @@ -28,7 +29,7 @@ from .const import ( DOMAIN, LUTRON_CASETA_BUTTON_EVENT, ) -from .models import LutronCasetaData +from .models import LutronCasetaConfigEntry _LOGGER = logging.getLogger(__name__) @@ -434,11 +435,14 @@ async def async_attach_trigger( def get_lutron_data_by_dr_id(hass: HomeAssistant, device_id: str): """Get a lutron integration data for the given device registry device id.""" - if DOMAIN not in hass.data: - return None - - for entry_id in hass.data[DOMAIN]: - data: LutronCasetaData = hass.data[DOMAIN][entry_id] - if data.keypad_data.dr_device_id_to_keypad.get(device_id): - return data + entries = cast( + list[LutronCasetaConfigEntry], + hass.config_entries.async_entries( + DOMAIN, include_ignore=False, include_disabled=False + ), + ) + for entry in entries: + if hasattr(entry, "runtime_data"): + if entry.runtime_data.keypad_data.dr_device_id_to_keypad.get(device_id): + return entry.runtime_data return None diff --git a/homeassistant/components/lutron_caseta/diagnostics.py b/homeassistant/components/lutron_caseta/diagnostics.py index 61a24d21b4e..02763b14247 100644 --- a/homeassistant/components/lutron_caseta/diagnostics.py +++ b/homeassistant/components/lutron_caseta/diagnostics.py @@ -7,15 +7,12 @@ from typing import Any from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .models import LutronCasetaData - async def async_get_config_entry_diagnostics( hass: HomeAssistant, entry: ConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - data: LutronCasetaData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data bridge = data.bridge return { "entry": { diff --git a/homeassistant/components/lutron_caseta/fan.py b/homeassistant/components/lutron_caseta/fan.py index 1577cf52727..cd333ba22c4 100644 --- a/homeassistant/components/lutron_caseta/fan.py +++ b/homeassistant/components/lutron_caseta/fan.py @@ -7,7 +7,6 @@ from typing import Any from pylutron_caseta import FAN_HIGH, FAN_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_OFF from homeassistant.components.fan import DOMAIN, FanEntity, FanEntityFeature -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.percentage import ( @@ -16,8 +15,7 @@ from homeassistant.util.percentage import ( ) from . 
import LutronCasetaDeviceUpdatableEntity -from .const import DOMAIN as CASETA_DOMAIN -from .models import LutronCasetaData +from .models import LutronCasetaConfigEntry DEFAULT_ON_PERCENTAGE = 50 ORDERED_NAMED_FAN_SPEEDS = [FAN_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH] @@ -25,7 +23,7 @@ ORDERED_NAMED_FAN_SPEEDS = [FAN_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH] async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: LutronCasetaConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Lutron Caseta fan platform. @@ -33,7 +31,7 @@ async def async_setup_entry( Adds fan controllers from the Caseta bridge associated with the config_entry as fan entities. """ - data: LutronCasetaData = hass.data[CASETA_DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data bridge = data.bridge fan_devices = bridge.get_devices_by_domain(DOMAIN) async_add_entities(LutronCasetaFan(fan_device, data) for fan_device in fan_devices) diff --git a/homeassistant/components/lutron_caseta/light.py b/homeassistant/components/lutron_caseta/light.py index 44c4c63e094..c0cf9449f87 100644 --- a/homeassistant/components/lutron_caseta/light.py +++ b/homeassistant/components/lutron_caseta/light.py @@ -25,11 +25,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import LutronCasetaDeviceUpdatableEntity -from .const import ( - DEVICE_TYPE_SPECTRUM_TUNE, - DEVICE_TYPE_WHITE_TUNE, - DOMAIN as CASETA_DOMAIN, -) +from .const import DEVICE_TYPE_SPECTRUM_TUNE, DEVICE_TYPE_WHITE_TUNE from .models import LutronCasetaData SUPPORTED_COLOR_MODE_DICT = { @@ -64,7 +60,7 @@ async def async_setup_entry( Adds dimmers from the Caseta bridge associated with the config_entry as light entities. """ - data: LutronCasetaData = hass.data[CASETA_DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data bridge = data.bridge light_devices = bridge.get_devices_by_domain(DOMAIN) async_add_entities( diff --git a/homeassistant/components/lutron_caseta/models.py b/homeassistant/components/lutron_caseta/models.py index d5ccbecbd61..402fa8885e8 100644 --- a/homeassistant/components/lutron_caseta/models.py +++ b/homeassistant/components/lutron_caseta/models.py @@ -8,8 +8,11 @@ from typing import Any, Final, TypedDict from pylutron_caseta.smartbridge import Smartbridge import voluptuous as vol +from homeassistant.config_entries import ConfigEntry from homeassistant.helpers.device_registry import DeviceInfo +type LutronCasetaConfigEntry = ConfigEntry[LutronCasetaData] + @dataclass class LutronCasetaData: diff --git a/homeassistant/components/lutron_caseta/scene.py b/homeassistant/components/lutron_caseta/scene.py index f4aebdafe9b..db4423495a4 100644 --- a/homeassistant/components/lutron_caseta/scene.py +++ b/homeassistant/components/lutron_caseta/scene.py @@ -11,7 +11,6 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN as CASETA_DOMAIN -from .models import LutronCasetaData from .util import serial_to_unique_id @@ -25,7 +24,7 @@ async def async_setup_entry( Adds scenes from the Caseta bridge associated with the config_entry as scene entities. 
""" - data: LutronCasetaData = hass.data[CASETA_DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data bridge = data.bridge scenes = bridge.get_scenes() async_add_entities(LutronCasetaScene(scenes[scene], data) for scene in scenes) diff --git a/homeassistant/components/lutron_caseta/switch.py b/homeassistant/components/lutron_caseta/switch.py index 795435d5f7c..b7ec5b58b04 100644 --- a/homeassistant/components/lutron_caseta/switch.py +++ b/homeassistant/components/lutron_caseta/switch.py @@ -8,8 +8,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import LutronCasetaDeviceUpdatableEntity -from .const import DOMAIN as CASETA_DOMAIN -from .models import LutronCasetaData async def async_setup_entry( @@ -22,7 +20,7 @@ async def async_setup_entry( Adds switches from the Caseta bridge associated with the config_entry as switch entities. """ - data: LutronCasetaData = hass.data[CASETA_DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data bridge = data.bridge switch_devices = bridge.get_devices_by_domain(DOMAIN) async_add_entities( diff --git a/homeassistant/components/matter/adapter.py b/homeassistant/components/matter/adapter.py index a3536435ded..d7a9f398c9f 100644 --- a/homeassistant/components/matter/adapter.py +++ b/homeassistant/components/matter/adapter.py @@ -4,7 +4,9 @@ from __future__ import annotations from typing import TYPE_CHECKING, cast +from chip.clusters.Objects import GeneralDiagnostics from matter_server.client.models.device_types import BridgedDevice +from matter_server.common.helpers.util import convert_mac_address from matter_server.common.models import EventType, ServerInfoMessage from homeassistant.config_entries import ConfigEntry @@ -22,6 +24,30 @@ if TYPE_CHECKING: from matter_server.client.models.node import MatterEndpoint, MatterNode +def get_connections_for_endpoint(endpoint: MatterEndpoint) -> set[tuple[str, str]]: + """Return a set of connections for a MatterEndpoint.""" + network_interfaces: list[GeneralDiagnostics.Structs.NetworkInterface] = ( + endpoint.get_attribute_value( + None, GeneralDiagnostics.Attributes.NetworkInterfaces + ) + or [] + ) + + hardware_addresses: set[str] = { + convert_mac_address(network_interface.hardwareAddress) + for network_interface in network_interfaces + if network_interface.hardwareAddress + } + + return { + (dr.CONNECTION_NETWORK_MAC, address) + if len(address) == 17 + else (dr.CONNECTION_ZIGBEE, address) + for address in hardware_addresses + if len(address) in (17, 23) # EUI-48 -> 17, EUI-64 -> 23 + } + + def get_clean_name(name: str | None) -> str | None: """Strip spaces and null char from the name.""" if name is None: @@ -185,6 +211,9 @@ class MatterAdapter: endpoint, ) identifiers = {(DOMAIN, f"{ID_TYPE_DEVICE_ID}_{node_device_id}")} + + connections = get_connections_for_endpoint(endpoint) + serial_number: str | None = None # if available, we also add the serialnumber as identifier if ( @@ -203,6 +232,7 @@ class MatterAdapter: name=name, config_entry_id=self.config_entry.entry_id, identifiers=identifiers, + connections=connections, hw_version=basic_info.hardwareVersionString, sw_version=basic_info.softwareVersionString, manufacturer=basic_info.vendorName or endpoint.node.device_info.vendorName, diff --git a/homeassistant/components/mealie/__init__.py b/homeassistant/components/mealie/__init__.py index 2d261af37a2..87b3e3988a2 100644 --- a/homeassistant/components/mealie/__init__.py +++ 
b/homeassistant/components/mealie/__init__.py @@ -4,15 +4,19 @@ from __future__ import annotations from aiomealie import MealieAuthenticationError, MealieClient, MealieConnectionError -from homeassistant.const import CONF_API_TOKEN, CONF_HOST, Platform +from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.exceptions import ( + ConfigEntryAuthFailed, + ConfigEntryError, + ConfigEntryNotReady, +) from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import DeviceEntryType from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN +from .const import DOMAIN, MIN_REQUIRED_MEALIE_VERSION from .coordinator import ( MealieConfigEntry, MealieData, @@ -20,6 +24,7 @@ from .coordinator import ( MealieShoppingListCoordinator, ) from .services import setup_services +from .utils import create_version PLATFORMS: list[Platform] = [Platform.CALENDAR, Platform.TODO] @@ -37,15 +42,28 @@ async def async_setup_entry(hass: HomeAssistant, entry: MealieConfigEntry) -> bo client = MealieClient( entry.data[CONF_HOST], token=entry.data[CONF_API_TOKEN], - session=async_get_clientsession(hass), + session=async_get_clientsession( + hass, verify_ssl=entry.data.get(CONF_VERIFY_SSL, True) + ), ) try: about = await client.get_about() + version = create_version(about.version) except MealieAuthenticationError as error: - raise ConfigEntryError("Authentication failed") from error + raise ConfigEntryAuthFailed from error except MealieConnectionError as error: raise ConfigEntryNotReady(error) from error + if not version.valid or version < MIN_REQUIRED_MEALIE_VERSION: + raise ConfigEntryError( + translation_domain=DOMAIN, + translation_key="version_error", + translation_placeholders={ + "mealie_version": about.version, + "min_version": MIN_REQUIRED_MEALIE_VERSION, + }, + ) + assert entry.unique_id device_registry = dr.async_get(hass) device_registry.async_get_or_create( @@ -59,8 +77,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: MealieConfigEntry) -> bo shoppinglist_coordinator = MealieShoppingListCoordinator(hass, client) await mealplan_coordinator.async_config_entry_first_refresh() - - await shoppinglist_coordinator.async_get_shopping_lists() await shoppinglist_coordinator.async_config_entry_first_refresh() entry.runtime_data = MealieData( diff --git a/homeassistant/components/mealie/config_flow.py b/homeassistant/components/mealie/config_flow.py index 550e4679720..6b75f57313c 100644 --- a/homeassistant/components/mealie/config_flow.py +++ b/homeassistant/components/mealie/config_flow.py @@ -1,20 +1,28 @@ """Config flow for Mealie.""" +from collections.abc import Mapping from typing import Any from aiomealie import MealieAuthenticationError, MealieClient, MealieConnectionError import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_API_TOKEN, CONF_HOST +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN, LOGGER +from .const import DOMAIN, LOGGER, MIN_REQUIRED_MEALIE_VERSION +from .utils import 
create_version -SCHEMA = vol.Schema( +USER_SCHEMA = vol.Schema( { vol.Required(CONF_HOST): str, vol.Required(CONF_API_TOKEN): str, + vol.Optional(CONF_VERIFY_SSL, default=True): bool, + } +) +REAUTH_SCHEMA = vol.Schema( + { + vol.Required(CONF_API_TOKEN): str, } ) @@ -22,28 +30,48 @@ SCHEMA = vol.Schema( class MealieConfigFlow(ConfigFlow, domain=DOMAIN): """Mealie config flow.""" + host: str | None = None + verify_ssl: bool = True + entry: ConfigEntry | None = None + + async def check_connection( + self, api_token: str + ) -> tuple[dict[str, str], str | None]: + """Check connection to the Mealie API.""" + assert self.host is not None + client = MealieClient( + self.host, + token=api_token, + session=async_get_clientsession(self.hass, verify_ssl=self.verify_ssl), + ) + try: + info = await client.get_user_info() + about = await client.get_about() + version = create_version(about.version) + except MealieConnectionError: + return {"base": "cannot_connect"}, None + except MealieAuthenticationError: + return {"base": "invalid_auth"}, None + except Exception: # noqa: BLE001 + LOGGER.exception("Unexpected error") + return {"base": "unknown"}, None + if not version.valid or version < MIN_REQUIRED_MEALIE_VERSION: + return {"base": "mealie_version"}, None + return {}, info.user_id + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" errors: dict[str, str] = {} if user_input: - client = MealieClient( - user_input[CONF_HOST], - token=user_input[CONF_API_TOKEN], - session=async_get_clientsession(self.hass), + self.host = user_input[CONF_HOST] + self.verify_ssl = user_input[CONF_VERIFY_SSL] + errors, user_id = await self.check_connection( + user_input[CONF_API_TOKEN], ) - try: - info = await client.get_user_info() - except MealieConnectionError: - errors["base"] = "cannot_connect" - except MealieAuthenticationError: - errors["base"] = "invalid_auth" - except Exception: # noqa: BLE001 - LOGGER.exception("Unexpected error") - errors["base"] = "unknown" - else: - await self.async_set_unique_id(info.user_id) + if not errors: + await self.async_set_unique_id(user_id) self._abort_if_unique_id_configured() return self.async_create_entry( title="Mealie", @@ -51,6 +79,79 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN): ) return self.async_show_form( step_id="user", - data_schema=SCHEMA, + data_schema=USER_SCHEMA, + errors=errors, + ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauth upon an API authentication error.""" + self.host = entry_data[CONF_HOST] + self.verify_ssl = entry_data.get(CONF_VERIFY_SSL, True) + self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm reauth dialog.""" + errors: dict[str, str] = {} + if user_input: + errors, user_id = await self.check_connection( + user_input[CONF_API_TOKEN], + ) + if not errors: + assert self.entry + if self.entry.unique_id == user_id: + return self.async_update_reload_and_abort( + self.entry, + data={ + **self.entry.data, + CONF_API_TOKEN: user_input[CONF_API_TOKEN], + }, + ) + return self.async_abort(reason="wrong_account") + return self.async_show_form( + step_id="reauth_confirm", + data_schema=REAUTH_SCHEMA, + errors=errors, + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | 
None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + return await self.async_step_reconfigure_confirm() + + async def async_step_reconfigure_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration confirmation.""" + errors: dict[str, str] = {} + if user_input: + self.host = user_input[CONF_HOST] + self.verify_ssl = user_input[CONF_VERIFY_SSL] + errors, user_id = await self.check_connection( + user_input[CONF_API_TOKEN], + ) + if not errors: + assert self.entry + if self.entry.unique_id == user_id: + return self.async_update_reload_and_abort( + self.entry, + data={ + **self.entry.data, + CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL], + CONF_HOST: user_input[CONF_HOST], + CONF_API_TOKEN: user_input[CONF_API_TOKEN], + }, + reason="reconfigure_successful", + ) + return self.async_abort(reason="wrong_account") + return self.async_show_form( + step_id="reconfigure_confirm", + data_schema=USER_SCHEMA, errors=errors, ) diff --git a/homeassistant/components/mealie/const.py b/homeassistant/components/mealie/const.py index 0eb7d98164c..800cfd21db3 100644 --- a/homeassistant/components/mealie/const.py +++ b/homeassistant/components/mealie/const.py @@ -2,6 +2,8 @@ import logging +from awesomeversion import AwesomeVersion + DOMAIN = "mealie" LOGGER = logging.getLogger(__package__) @@ -12,3 +14,5 @@ ATTR_END_DATE = "end_date" ATTR_RECIPE_ID = "recipe_id" ATTR_URL = "url" ATTR_INCLUDE_TAGS = "include_tags" + +MIN_REQUIRED_MEALIE_VERSION = AwesomeVersion("v1.0.0") diff --git a/homeassistant/components/mealie/coordinator.py b/homeassistant/components/mealie/coordinator.py index 135100e1b07..bb97b3c26a3 100644 --- a/homeassistant/components/mealie/coordinator.py +++ b/homeassistant/components/mealie/coordinator.py @@ -17,7 +17,7 @@ from aiomealie import ( from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed import homeassistant.util.dt as dt_util @@ -82,7 +82,7 @@ class MealieMealplanCoordinator( await self.client.get_mealplans(dt_util.now().date(), next_week.date()) ).items except MealieAuthenticationError as error: - raise ConfigEntryError("Authentication failed") from error + raise ConfigEntryAuthFailed from error except MealieConnectionError as error: raise UpdateFailed(error) from error res: dict[MealplanEntryType, list[Mealplan]] = { @@ -96,8 +96,16 @@ class MealieMealplanCoordinator( return res +@dataclass +class ShoppingListData: + """Data class for shopping list data.""" + + shopping_list: ShoppingList + items: list[ShoppingItem] + + class MealieShoppingListCoordinator( - MealieDataUpdateCoordinator[dict[str, list[ShoppingItem]]] + MealieDataUpdateCoordinator[dict[str, ShoppingListData]] ): """Class to manage fetching Mealie Shopping list data.""" @@ -109,36 +117,25 @@ class MealieShoppingListCoordinator( client=client, update_interval=timedelta(minutes=5), ) - self.shopping_lists: list[ShoppingList] - - async def async_get_shopping_lists(self) -> list[ShoppingList]: - """Return shopping lists.""" - try: - self.shopping_lists = (await self.client.get_shopping_lists()).items - except MealieAuthenticationError as error: - raise ConfigEntryError("Authentication failed") from error - 
except MealieConnectionError as error: - raise UpdateFailed(error) from error - return self.shopping_lists async def _async_update_data( self, - ) -> dict[str, list[ShoppingItem]]: - shopping_list_items: dict[str, list[ShoppingItem]] = {} - + ) -> dict[str, ShoppingListData]: + shopping_list_items = {} try: - for shopping_list in self.shopping_lists: + shopping_lists = (await self.client.get_shopping_lists()).items + for shopping_list in shopping_lists: shopping_list_id = shopping_list.list_id shopping_items = ( await self.client.get_shopping_items(shopping_list_id) ).items - shopping_list_items[shopping_list_id] = shopping_items - + shopping_list_items[shopping_list_id] = ShoppingListData( + shopping_list=shopping_list, items=shopping_items + ) except MealieAuthenticationError as error: - raise ConfigEntryError("Authentication failed") from error + raise ConfigEntryAuthFailed from error except MealieConnectionError as error: raise UpdateFailed(error) from error - return shopping_list_items diff --git a/homeassistant/components/mealie/diagnostics.py b/homeassistant/components/mealie/diagnostics.py new file mode 100644 index 00000000000..b1c8640f007 --- /dev/null +++ b/homeassistant/components/mealie/diagnostics.py @@ -0,0 +1,31 @@ +"""Diagnostics support for the Mealie integration.""" + +from __future__ import annotations + +from dataclasses import asdict +from typing import Any + +from homeassistant.core import HomeAssistant + +from . import MealieConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: MealieConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data = config_entry.runtime_data + + about = await data.client.get_about() + + return { + "about": asdict(about), + "mealplans": { + entry_type: [asdict(mealplan) for mealplan in mealplans] + for entry_type, mealplans in data.mealplan_coordinator.data.items() + }, + "shoppinglist": { + list_id: asdict(shopping_list) + for list_id, shopping_list in data.shoppinglist_coordinator.data.items() + }, + } diff --git a/homeassistant/components/mealie/services.py b/homeassistant/components/mealie/services.py index ac8d5519310..7671c65b41f 100644 --- a/homeassistant/components/mealie/services.py +++ b/homeassistant/components/mealie/services.py @@ -19,6 +19,7 @@ from homeassistant.core import ( SupportsResponse, ) from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import config_validation as cv from .const import ( ATTR_CONFIG_ENTRY_ID, @@ -35,8 +36,8 @@ SERVICE_GET_MEALPLAN = "get_mealplan" SERVICE_GET_MEALPLAN_SCHEMA = vol.Schema( { vol.Required(ATTR_CONFIG_ENTRY_ID): str, - vol.Optional(ATTR_START_DATE): date, - vol.Optional(ATTR_END_DATE): date, + vol.Optional(ATTR_START_DATE): cv.date, + vol.Optional(ATTR_END_DATE): cv.date, } ) diff --git a/homeassistant/components/mealie/strings.json b/homeassistant/components/mealie/strings.json index 0e54a64b199..a0b0dcbfc4f 100644 --- a/homeassistant/components/mealie/strings.json +++ b/homeassistant/components/mealie/strings.json @@ -4,20 +4,39 @@ "user": { "data": { "host": "[%key:common::config_flow::data::url%]", - "api_token": "[%key:common::config_flow::data::api_token%]" + "api_token": "[%key:common::config_flow::data::api_token%]", + "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" }, "data_description": { "host": "The URL of your Mealie instance." 
} + }, + "reauth_confirm": { + "description": "Please reauthenticate with Mealie.", + "data": { + "api_token": "[%key:common::config_flow::data::api_token%]" + } + }, + "reconfigure_confirm": { + "description": "Please reconfigure with Mealie.", + "data": { + "host": "[%key:common::config_flow::data::url%]", + "api_token": "[%key:common::config_flow::data::api_token%]", + "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + } } }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "unknown": "[%key:common::config_flow::error::unknown%]" + "unknown": "[%key:common::config_flow::error::unknown%]", + "mealie_version": "Minimum required version is v1.0.0. Please upgrade Mealie and then retry." }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "wrong_account": "You have to use the same account that was used to configure the integration." } }, "entity": { @@ -66,6 +85,9 @@ }, "item_not_found_error": { "message": "Item {shopping_list_item} not found." + }, + "version_error": { + "message": "You are running {mealie_version} of Mealie. Minimum required version is {min_version}. Please upgrade Mealie and then retry." } }, "services": { diff --git a/homeassistant/components/mealie/todo.py b/homeassistant/components/mealie/todo.py index 7009dedf105..508b6aeb5e2 100644 --- a/homeassistant/components/mealie/todo.py +++ b/homeassistant/components/mealie/todo.py @@ -5,6 +5,7 @@ from __future__ import annotations from aiomealie import MealieError, MutateShoppingItem, ShoppingItem, ShoppingList from homeassistant.components.todo import ( + DOMAIN as TODO_DOMAIN, TodoItem, TodoItemStatus, TodoListEntity, @@ -12,6 +13,7 @@ from homeassistant.components.todo import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN @@ -48,10 +50,36 @@ async def async_setup_entry( """Set up the todo platform for entity.""" coordinator = entry.runtime_data.shoppinglist_coordinator - async_add_entities( - MealieShoppingListTodoListEntity(coordinator, shopping_list) - for shopping_list in coordinator.shopping_lists - ) + added_lists: set[str] = set() + + assert entry.unique_id is not None + + def _async_delete_entities(lists: set[str]) -> None: + """Delete entities for removed shopping lists.""" + entity_registry = er.async_get(hass) + for list_id in lists: + entity_id = entity_registry.async_get_entity_id( + TODO_DOMAIN, DOMAIN, f"{entry.unique_id}_{list_id}" + ) + if entity_id: + entity_registry.async_remove(entity_id) + + def _async_entity_listener() -> None: + """Handle additions/deletions of shopping lists.""" + received_lists = set(coordinator.data) + new_lists = received_lists - added_lists + removed_lists = added_lists - received_lists + if new_lists: + async_add_entities( + MealieShoppingListTodoListEntity(coordinator, shopping_list_id) + for shopping_list_id in new_lists + ) + added_lists.update(new_lists) + if removed_lists: + _async_delete_entities(removed_lists) + + 
coordinator.async_add_listener(_async_entity_listener) + _async_entity_listener() class MealieShoppingListTodoListEntity(MealieEntity, TodoListEntity): @@ -69,17 +97,22 @@ class MealieShoppingListTodoListEntity(MealieEntity, TodoListEntity): coordinator: MealieShoppingListCoordinator def __init__( - self, coordinator: MealieShoppingListCoordinator, shopping_list: ShoppingList + self, coordinator: MealieShoppingListCoordinator, shopping_list_id: str ) -> None: """Create the todo entity.""" - super().__init__(coordinator, shopping_list.list_id) - self._shopping_list = shopping_list - self._attr_name = shopping_list.name + super().__init__(coordinator, shopping_list_id) + self._shopping_list_id = shopping_list_id + self._attr_name = self.shopping_list.name + + @property + def shopping_list(self) -> ShoppingList: + """Get the shopping list.""" + return self.coordinator.data[self._shopping_list_id].shopping_list @property def shopping_items(self) -> list[ShoppingItem]: """Get the shopping items for this list.""" - return self.coordinator.data[self._shopping_list.list_id] + return self.coordinator.data[self._shopping_list_id].items @property def todo_items(self) -> list[TodoItem] | None: @@ -93,7 +126,7 @@ class MealieShoppingListTodoListEntity(MealieEntity, TodoListEntity): position = self.shopping_items[-1].position + 1 new_shopping_item = MutateShoppingItem( - list_id=self._shopping_list.list_id, + list_id=self._shopping_list_id, note=item.summary.strip() if item.summary else item.summary, position=position, ) @@ -104,7 +137,7 @@ class MealieShoppingListTodoListEntity(MealieEntity, TodoListEntity): translation_domain=DOMAIN, translation_key="add_item_error", translation_placeholders={ - "shopping_list_name": self._shopping_list.name + "shopping_list_name": self.shopping_list.name }, ) from exception finally: @@ -164,7 +197,7 @@ class MealieShoppingListTodoListEntity(MealieEntity, TodoListEntity): translation_domain=DOMAIN, translation_key="update_item_error", translation_placeholders={ - "shopping_list_name": self._shopping_list.name + "shopping_list_name": self.shopping_list.name }, ) from exception finally: @@ -180,7 +213,7 @@ class MealieShoppingListTodoListEntity(MealieEntity, TodoListEntity): translation_domain=DOMAIN, translation_key="delete_item_error", translation_placeholders={ - "shopping_list_name": self._shopping_list.name + "shopping_list_name": self.shopping_list.name }, ) from exception finally: @@ -238,6 +271,4 @@ class MealieShoppingListTodoListEntity(MealieEntity, TodoListEntity): @property def available(self) -> bool: """Return False if shopping list no longer available.""" - return ( - super().available and self._shopping_list.list_id in self.coordinator.data - ) + return super().available and self._shopping_list_id in self.coordinator.data diff --git a/homeassistant/components/mealie/utils.py b/homeassistant/components/mealie/utils.py new file mode 100644 index 00000000000..36d0831208b --- /dev/null +++ b/homeassistant/components/mealie/utils.py @@ -0,0 +1,10 @@ +"""Mealie util functions.""" + +from __future__ import annotations + +from awesomeversion import AwesomeVersion + + +def create_version(version: str) -> AwesomeVersion: + """Convert beta versions to PEP440.""" + return AwesomeVersion(version.replace("beta-", "b")) diff --git a/homeassistant/components/modbus/strings.json b/homeassistant/components/modbus/strings.json index f89f9a97d52..8e746ca1299 100644 --- a/homeassistant/components/modbus/strings.json +++ b/homeassistant/components/modbus/strings.json @@ 
-100,7 +100,7 @@ }, "deprecated_restart": { "title": "`modbus.restart` is being removed", - "description": "Please use reload yaml via the developer tools in the UI instead of via the `modbus.restart` service." + "description": "Please use reload yaml via the developer tools in the UI instead of via the `modbus.restart` action." } } } diff --git a/homeassistant/components/mopeka/__init__.py b/homeassistant/components/mopeka/__init__.py index da3ee156683..2538ec3d810 100644 --- a/homeassistant/components/mopeka/__init__.py +++ b/homeassistant/components/mopeka/__init__.py @@ -14,37 +14,32 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN - PLATFORMS: list[Platform] = [Platform.SENSOR] _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +type MopekaConfigEntry = ConfigEntry[PassiveBluetoothProcessorCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: MopekaConfigEntry) -> bool: """Set up Mopeka BLE device from a config entry.""" address = entry.unique_id assert address is not None data = MopekaIOTBluetoothDeviceData() - coordinator = hass.data.setdefault(DOMAIN, {})[entry.entry_id] = ( - PassiveBluetoothProcessorCoordinator( - hass, - _LOGGER, - address=address, - mode=BluetoothScanningMode.PASSIVE, - update_method=data.update, - ) + coordinator = entry.runtime_data = PassiveBluetoothProcessorCoordinator( + hass, + _LOGGER, + address=address, + mode=BluetoothScanningMode.PASSIVE, + update_method=data.update, ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_on_unload( - coordinator.async_start() - ) # only start after all platforms have had a chance to subscribe + # only start after all platforms have had a chance to subscribe + entry.async_on_unload(coordinator.async_start()) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: MopekaConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/mopeka/sensor.py b/homeassistant/components/mopeka/sensor.py index 74beaccd001..0f67efaea1e 100644 --- a/homeassistant/components/mopeka/sensor.py +++ b/homeassistant/components/mopeka/sensor.py @@ -4,11 +4,9 @@ from __future__ import annotations from mopeka_iot_ble import SensorUpdate -from homeassistant import config_entries from homeassistant.components.bluetooth.passive_update_processor import ( PassiveBluetoothDataProcessor, PassiveBluetoothDataUpdate, - PassiveBluetoothProcessorCoordinator, PassiveBluetoothProcessorEntity, ) from homeassistant.components.sensor import ( @@ -29,7 +27,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.sensor import sensor_device_info_to_hass_device_info -from .const import DOMAIN +from . 
import MopekaConfigEntry from .device import device_key_to_bluetooth_entity_key SENSOR_DESCRIPTIONS = { @@ -116,13 +114,11 @@ def sensor_update_to_bluetooth_data_update( async def async_setup_entry( hass: HomeAssistant, - entry: config_entries.ConfigEntry, + entry: MopekaConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Mopeka BLE sensors.""" - coordinator: PassiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ - entry.entry_id - ] + coordinator = entry.runtime_data processor = PassiveBluetoothDataProcessor(sensor_update_to_bluetooth_data_update) entry.async_on_unload( processor.async_add_entities_listener( diff --git a/homeassistant/components/mvglive/manifest.json b/homeassistant/components/mvglive/manifest.json index c4a3040dc20..f73d4612c2e 100644 --- a/homeassistant/components/mvglive/manifest.json +++ b/homeassistant/components/mvglive/manifest.json @@ -2,6 +2,7 @@ "domain": "mvglive", "name": "MVG", "codeowners": [], + "disabled": "This integration is disabled because it uses non-open source code to operate.", "documentation": "https://www.home-assistant.io/integrations/mvglive", "iot_class": "cloud_polling", "loggers": ["MVGLive"], diff --git a/homeassistant/components/mvglive/ruff.toml b/homeassistant/components/mvglive/ruff.toml new file mode 100644 index 00000000000..38f6f586aef --- /dev/null +++ b/homeassistant/components/mvglive/ruff.toml @@ -0,0 +1,5 @@ +extend = "../../../pyproject.toml" + +lint.extend-ignore = [ + "F821" +] diff --git a/homeassistant/components/mvglive/sensor.py b/homeassistant/components/mvglive/sensor.py index 966bfebb577..b482de8130c 100644 --- a/homeassistant/components/mvglive/sensor.py +++ b/homeassistant/components/mvglive/sensor.py @@ -1,5 +1,6 @@ """Support for departure information for public transport in Munich.""" +# mypy: ignore-errors from __future__ import annotations from copy import deepcopy diff --git a/homeassistant/components/neato/strings.json b/homeassistant/components/neato/strings.json index 6a442e7c353..e2c983167b1 100644 --- a/homeassistant/components/neato/strings.json +++ b/homeassistant/components/neato/strings.json @@ -42,8 +42,8 @@ }, "services": { "custom_cleaning": { - "name": "Zone cleaning service", - "description": "Zone cleaning service call specific to Neato Botvacs.", + "name": "Zone cleaning action", + "description": "Zone cleaning action specific to Neato Botvacs.", "fields": { "mode": { "name": "Set cleaning mode", diff --git a/homeassistant/components/notify/strings.json b/homeassistant/components/notify/strings.json index 947b192c4cd..12d43b82c00 100644 --- a/homeassistant/components/notify/strings.json +++ b/homeassistant/components/notify/strings.json @@ -63,23 +63,23 @@ }, "issues": { "migrate_notify": { - "title": "Migration of {integration_title} notify service", + "title": "Migration of {integration_title} notify action", "fix_flow": { "step": { "confirm": { - "description": "The {integration_title} `notify` service(s) are migrated. A new `notify` entity is available now to replace each legacy `notify` service.\n\nUpdate any automations to use the new `notify.send_message` service exposed with this new entity. When this is done, fix this issue and restart Home Assistant.", - "title": "Migrate legacy {integration_title} notify service for domain `{domain}`" + "description": "The {integration_title} `notify` actions(s) are migrated. 
A new `notify` entity is available now to replace each legacy `notify` action.\n\nUpdate any automations to use the new `notify.send_message` action exposed with this new entity. When this is done, fix this issue and restart Home Assistant.", + "title": "Migrate legacy {integration_title} notify action for domain `{domain}`" } } } }, "migrate_notify_service": { - "title": "Legacy service `notify.{service_name}` stll being used", + "title": "Legacy action `notify.{service_name}` stll being used", "fix_flow": { "step": { "confirm": { - "description": "The {integration_title} `notify.{service_name}` service is migrated, but it seems the old `notify` service is still being used.\n\nA new `notify` entity is available now to replace each legacy `notify` service.\n\nUpdate any automations or scripts to use the new `notify.send_message` service exposed with this new entity. When this is done, select Submit and restart Home Assistant.", - "title": "Migrate legacy {integration_title} notify service for domain `{domain}`" + "description": "The {integration_title} `notify.{service_name}` action is migrated, but it seems the old `notify` action is still being used.\n\nA new `notify` entity is available now to replace each legacy `notify` action.\n\nUpdate any automations or scripts to use the new `notify.send_message` action exposed with this new entity. When this is done, select Submit and restart Home Assistant.", + "title": "Migrate legacy {integration_title} notify action for domain `{domain}`" } } } diff --git a/homeassistant/components/openai_conversation/strings.json b/homeassistant/components/openai_conversation/strings.json index c5d42eb9521..4af333d42b4 100644 --- a/homeassistant/components/openai_conversation/strings.json +++ b/homeassistant/components/openai_conversation/strings.json @@ -38,7 +38,7 @@ "fields": { "config_entry": { "name": "Config Entry", - "description": "The config entry to use for this service" + "description": "The config entry to use for this action" }, "prompt": { "name": "Prompt", @@ -67,8 +67,8 @@ }, "issues": { "image_size_deprecated_format": { - "title": "Deprecated size format for image generation service", - "description": "OpenAI is now using Dall-E 3 to generate images when calling `openai_conversation.generate_image`, which supports different sizes. Valid values are now \"1024x1024\", \"1024x1792\", \"1792x1024\". The old values of \"256\", \"512\", \"1024\" are currently interpreted as \"1024x1024\".\nPlease update your scripts or automations with the new parameters." + "title": "Deprecated size format for image generation action", + "description": "OpenAI is now using Dall-E 3 to generate images using `openai_conversation.generate_image`, which supports different sizes. Valid values are now \"1024x1024\", \"1024x1792\", \"1792x1024\". The old values of \"256\", \"512\", \"1024\" are currently interpreted as \"1024x1024\".\nPlease update your scripts or automations with the new parameters." 
} } } diff --git a/homeassistant/components/opentherm_gw/__init__.py b/homeassistant/components/opentherm_gw/__init__.py index 46cc6f3daa0..a0d791fddd4 100644 --- a/homeassistant/components/opentherm_gw/__init__.py +++ b/homeassistant/components/opentherm_gw/__init__.py @@ -470,3 +470,8 @@ class OpenThermGatewayDevice: async_dispatcher_send(self.hass, self.update_signal, status) self.gateway.subscribe(handle_report) + + @property + def connected(self): + """Report whether or not we are connected to the gateway.""" + return self.gateway.connection.connected diff --git a/homeassistant/components/opentherm_gw/binary_sensor.py b/homeassistant/components/opentherm_gw/binary_sensor.py index ad8d09afa89..7c3760653e8 100644 --- a/homeassistant/components/opentherm_gw/binary_sensor.py +++ b/homeassistant/components/opentherm_gw/binary_sensor.py @@ -48,6 +48,7 @@ class OpenThermBinarySensor(BinarySensorEntity): _attr_should_poll = False _attr_entity_registry_enabled_default = False + _attr_available = False def __init__(self, gw_dev, var, source, device_class, friendly_name_format): """Initialize the binary sensor.""" @@ -85,14 +86,10 @@ class OpenThermBinarySensor(BinarySensorEntity): _LOGGER.debug("Removing OpenTherm Gateway binary sensor %s", self._attr_name) self._unsub_updates() - @property - def available(self): - """Return availability of the sensor.""" - return self._attr_is_on is not None - @callback def receive_report(self, status): """Handle status updates from the component.""" + self._attr_available = self._gateway.connected state = status[self._source].get(self._var) self._attr_is_on = None if state is None else bool(state) self.async_write_ha_state() diff --git a/homeassistant/components/opentherm_gw/climate.py b/homeassistant/components/opentherm_gw/climate.py index 2d9f1687463..5eb1246e55f 100644 --- a/homeassistant/components/opentherm_gw/climate.py +++ b/homeassistant/components/opentherm_gw/climate.py @@ -138,7 +138,7 @@ class OpenThermClimate(ClimateEntity): @callback def receive_report(self, status): """Receive and handle a new report from the Gateway.""" - self._attr_available = status != gw_vars.DEFAULT_STATUS + self._attr_available = self._gateway.connected ch_active = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_CH_ACTIVE) flame_on = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_FLAME_ON) cooling_active = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_COOLING_ACTIVE) diff --git a/homeassistant/components/opentherm_gw/sensor.py b/homeassistant/components/opentherm_gw/sensor.py index 9171292c21b..8c17aca4516 100644 --- a/homeassistant/components/opentherm_gw/sensor.py +++ b/homeassistant/components/opentherm_gw/sensor.py @@ -45,6 +45,7 @@ class OpenThermSensor(SensorEntity): _attr_should_poll = False _attr_entity_registry_enabled_default = False + _attr_available = False def __init__( self, @@ -94,14 +95,10 @@ class OpenThermSensor(SensorEntity): _LOGGER.debug("Removing OpenTherm Gateway sensor %s", self._attr_name) self._unsub_updates() - @property - def available(self): - """Return availability of the sensor.""" - return self._attr_native_value is not None - @callback def receive_report(self, status): """Handle status updates from the component.""" + self._attr_available = self._gateway.connected value = status[self._source].get(self._var) self._attr_native_value = value self.async_write_ha_state() diff --git a/homeassistant/components/opentherm_gw/strings.json b/homeassistant/components/opentherm_gw/strings.json index 2ad34f8d659..9eb97539df9 100644 --- 
a/homeassistant/components/opentherm_gw/strings.json +++ b/homeassistant/components/opentherm_gw/strings.json @@ -41,7 +41,7 @@ }, "set_central_heating_ovrd": { "name": "Set central heating override", - "description": "Sets the central heating override option on the gateway. When overriding the control setpoint (via a set_control_setpoint service call with a value other than 0), the gateway automatically enables the central heating override to start heating. This service can then be used to control the central heating override status. To return control of the central heating to the thermostat, call the set_control_setpoint service with temperature value 0. You will only need this if you are writing your own software thermostat.\n.", + "description": "Sets the central heating override option on the gateway. When overriding the control setpoint (via a set_control_setpoint action with a value other than 0), the gateway automatically enables the central heating override to start heating. This action can then be used to control the central heating override status. To return control of the central heating to the thermostat, use the set_control_setpoint action with temperature value 0. You will only need this if you are writing your own software thermostat.\n.", "fields": { "gateway_id": { "name": "[%key:component::opentherm_gw::services::reset_gateway::fields::gateway_id::name%]", diff --git a/homeassistant/components/opower/manifest.json b/homeassistant/components/opower/manifest.json index d419fdcb043..28c2e8ba2a8 100644 --- a/homeassistant/components/opower/manifest.json +++ b/homeassistant/components/opower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/opower", "iot_class": "cloud_polling", "loggers": ["opower"], - "requirements": ["opower==0.4.7"] + "requirements": ["opower==0.5.2"] } diff --git a/homeassistant/components/pyload/coordinator.py b/homeassistant/components/pyload/coordinator.py index c55ca4c1630..7eadefcd260 100644 --- a/homeassistant/components/pyload/coordinator.py +++ b/homeassistant/components/pyload/coordinator.py @@ -30,7 +30,7 @@ class PyLoadData: speed: float download: bool reconnect: bool - captcha: bool + captcha: bool | None = None free_space: int diff --git a/homeassistant/components/pyload/manifest.json b/homeassistant/components/pyload/manifest.json index fe1888478f8..788cdd1eb05 100644 --- a/homeassistant/components/pyload/manifest.json +++ b/homeassistant/components/pyload/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_polling", "loggers": ["pyloadapi"], "quality_scale": "platinum", - "requirements": ["PyLoadAPI==1.2.0"] + "requirements": ["PyLoadAPI==1.3.2"] } diff --git a/homeassistant/components/rainforest_raven/coordinator.py b/homeassistant/components/rainforest_raven/coordinator.py index 37e44b12eba..d08a10c2670 100644 --- a/homeassistant/components/rainforest_raven/coordinator.py +++ b/homeassistant/components/rainforest_raven/coordinator.py @@ -167,7 +167,7 @@ class RAVEnDataCoordinator(DataUpdateCoordinator): await device.synchronize() self._device_info = await device.get_device_info() except: - await device.close() + await device.abort() raise self._raven_device = device diff --git a/homeassistant/components/rainforest_raven/manifest.json b/homeassistant/components/rainforest_raven/manifest.json index bc44c3fc30c..49bd11e8880 100644 --- a/homeassistant/components/rainforest_raven/manifest.json +++ b/homeassistant/components/rainforest_raven/manifest.json @@ -6,7 +6,7 @@ "dependencies": ["usb"], 
"documentation": "https://www.home-assistant.io/integrations/rainforest_raven", "iot_class": "local_polling", - "requirements": ["aioraven==0.6.0"], + "requirements": ["aioraven==0.7.0"], "usb": [ { "vid": "0403", diff --git a/homeassistant/components/recorder/const.py b/homeassistant/components/recorder/const.py index f2af5306ded..00121608b4c 100644 --- a/homeassistant/components/recorder/const.py +++ b/homeassistant/components/recorder/const.py @@ -32,8 +32,7 @@ DOMAIN = "recorder" CONF_DB_INTEGRITY_CHECK = "db_integrity_check" MAX_QUEUE_BACKLOG_MIN_VALUE = 65000 -ESTIMATED_QUEUE_ITEM_SIZE = 10240 -QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY = 0.65 +MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG = 256 * 1024**2 # The maximum number of rows (events) we purge in one delete statement diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 01fda0f02fa..09c85105121 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -16,7 +16,14 @@ import time from typing import TYPE_CHECKING, Any, cast import psutil_home_assistant as ha_psutil -from sqlalchemy import create_engine, event as sqlalchemy_event, exc, select, update +from sqlalchemy import ( + create_engine, + event as sqlalchemy_event, + exc, + inspect, + select, + update, +) from sqlalchemy.engine import Engine from sqlalchemy.engine.interfaces import DBAPIConnection from sqlalchemy.exc import SQLAlchemyError @@ -53,16 +60,15 @@ from . import migration, statistics from .const import ( DB_WORKER_PREFIX, DOMAIN, - ESTIMATED_QUEUE_ITEM_SIZE, KEEPALIVE_TIME, LAST_REPORTED_SCHEMA_VERSION, LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION, MARIADB_PYMYSQL_URL_PREFIX, MARIADB_URL_PREFIX, MAX_QUEUE_BACKLOG_MIN_VALUE, + MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG, MYSQLDB_PYMYSQL_URL_PREFIX, MYSQLDB_URL_PREFIX, - QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY, SQLITE_MAX_BIND_VARS, SQLITE_URL_PREFIX, STATISTICS_ROWS_SCHEMA_VERSION, @@ -156,6 +162,7 @@ ADJUST_LRU_SIZE_TASK = AdjustLRUSizeTask() DB_LOCK_TIMEOUT = 30 DB_LOCK_QUEUE_CHECK_TIMEOUT = 10 # check every 10 seconds +QUEUE_CHECK_INTERVAL = timedelta(minutes=5) INVALIDATED_ERR = "Database connection invalidated" CONNECTIVITY_ERR = "Error in database connectivity during commit" @@ -319,9 +326,8 @@ class Recorder(threading.Thread): if event.event_type in exclude_event_types: return - if ( - entity_filter is None - or (entity_id := event.data.get(ATTR_ENTITY_ID)) is None + if entity_filter is None or not ( + entity_id := event.data.get(ATTR_ENTITY_ID) ): queue_put(event) return @@ -348,7 +354,7 @@ class Recorder(threading.Thread): self._queue_watcher = async_track_time_interval( self.hass, self._async_check_queue, - timedelta(minutes=10), + QUEUE_CHECK_INTERVAL, name="Recorder queue watcher", ) @@ -388,9 +394,8 @@ class Recorder(threading.Thread): The queue grows during migration or if something really goes wrong. 
""" - size = self.backlog - _LOGGER.debug("Recorder queue size is: %s", size) - if not self._reached_max_backlog_percentage(100): + _LOGGER.debug("Recorder queue size is: %s", self.backlog) + if not self._reached_max_backlog(): return _LOGGER.error( ( @@ -409,22 +414,15 @@ class Recorder(threading.Thread): self._psutil = ha_psutil.PsutilWrapper() return cast(int, self._psutil.psutil.virtual_memory().available) - def _reached_max_backlog_percentage(self, percentage: int) -> bool: - """Check if the system has reached the max queue backlog and return the maximum if it has.""" - percentage_modifier = percentage / 100 - current_backlog = self.backlog + def _reached_max_backlog(self) -> bool: + """Check if the system has reached the max queue backlog and return True if it has.""" # First check the minimum value since its cheap - if current_backlog < (MAX_QUEUE_BACKLOG_MIN_VALUE * percentage_modifier): + if self.backlog < MAX_QUEUE_BACKLOG_MIN_VALUE: return False # If they have more RAM available, keep filling the backlog # since we do not want to stop recording events or give the # user a bad backup when they have plenty of RAM available. - max_queue_backlog = int( - QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY - * (self._available_memory() / ESTIMATED_QUEUE_ITEM_SIZE) - ) - self.max_backlog = max(max_queue_backlog, MAX_QUEUE_BACKLOG_MIN_VALUE) - return current_backlog >= (max_queue_backlog * percentage_modifier) + return self._available_memory() < MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG @callback def _async_stop_queue_watcher_and_event_listener(self) -> None: @@ -829,7 +827,7 @@ class Recorder(threading.Thread): # If ix_states_entity_id_last_updated_ts still exists # on the states table it means the entity id migration # finished by the EntityIDPostMigrationTask did not - # because they restarted in the middle of it. We need + # complete because they restarted in the middle of it. We need # to pick back up where we left off. if get_index_by_name( session, @@ -841,9 +839,13 @@ class Recorder(threading.Thread): if self.schema_version > LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION: with contextlib.suppress(SQLAlchemyError): # If the index of event_ids on the states table is still present - # we need to queue a task to remove it. - if get_index_by_name( - session, TABLE_STATES, LEGACY_STATES_EVENT_ID_INDEX + # or the event_id foreign key still exists we need to queue a + # task to remove it. + if ( + get_index_by_name( + session, TABLE_STATES, LEGACY_STATES_EVENT_ID_INDEX + ) + or self._legacy_event_id_foreign_key_exists() ): self.queue_task(EventIdMigrationTask()) self.use_legacy_events_index = True @@ -1020,13 +1022,12 @@ class Recorder(threading.Thread): # Notify that lock is being held, wait until database can be used again. hass.add_job(_async_set_database_locked, task) while not task.database_unlock.wait(timeout=DB_LOCK_QUEUE_CHECK_TIMEOUT): - if self._reached_max_backlog_percentage(90): + if self._reached_max_backlog(): _LOGGER.warning( - "Database queue backlog reached more than %s (%s events) of maximum queue " - "length while waiting for backup to finish; recorder will now " + "Database queue backlog reached more than %s events " + "while waiting for backup to finish; recorder will now " "resume writing to database. 
The backup cannot be trusted and " "must be restarted", - "90%", self.backlog, ) task.queue_overflow = True @@ -1181,7 +1182,15 @@ class Recorder(threading.Thread): def _handle_database_error(self, err: Exception) -> bool: """Handle a database error that may result in moving away the corrupt db.""" - if isinstance(err.__cause__, sqlite3.DatabaseError): + if ( + (cause := err.__cause__) + and isinstance(cause, sqlite3.DatabaseError) + and (cause_str := str(cause)) + # Make sure we do not move away a database when its only locked + # externally by another process. sqlite does not give us a named + # exception for this so we have to check the error message. + and ("malformed" in cause_str or "not a database" in cause_str) + ): _LOGGER.exception( "Unrecoverable sqlite3 database corruption detected: %s", err ) @@ -1295,6 +1304,21 @@ class Recorder(threading.Thread): """Run post schema migration tasks.""" migration.post_schema_migration(self, old_version, new_version) + def _legacy_event_id_foreign_key_exists(self) -> bool: + """Check if the legacy event_id foreign key exists.""" + engine = self.engine + assert engine is not None + return bool( + next( + ( + fk + for fk in inspect(engine).get_foreign_keys(TABLE_STATES) + if fk["constrained_columns"] == ["event_id"] + ), + None, + ) + ) + def _migrate_states_context_ids(self) -> bool: """Migrate states context ids if needed.""" return migration.migrate_states_context_ids(self) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index b6acb6601ff..3cbec60e83f 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -738,16 +738,18 @@ def _sorted_states_to_dict( or split_entity_id(entity_id)[0] in NEED_ATTRIBUTE_DOMAINS ): ent_results.extend( - state_class( - db_state, - attr_cache, - start_time_ts, - entity_id, - db_state[state_idx], - db_state[last_updated_ts_idx], - False, - ) - for db_state in group + [ + state_class( + db_state, + attr_cache, + start_time_ts, + entity_id, + db_state[state_idx], + db_state[last_updated_ts_idx], + False, + ) + for db_state in group + ] ) continue diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 517ea4ca5cb..69bfc7cb2a8 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -333,11 +333,9 @@ def _create_index( index = index_list[0] _LOGGER.debug("Creating %s index", index_name) _LOGGER.warning( - ( - "Adding index `%s` to table `%s`. Note: this can take several " - "minutes on large databases and slow computers. Please " - "be patient!" - ), + "Adding index `%s` to table `%s`. Note: this can take several " + "minutes on large databases and slow computers. Please " + "be patient!", index_name, table_name, ) @@ -351,7 +349,7 @@ def _create_index( "Index %s already exists on %s, continuing", index_name, table_name ) - _LOGGER.debug("Finished creating %s", index_name) + _LOGGER.warning("Finished adding index `%s` to table `%s`", index_name, table_name) def _execute_or_collect_error( @@ -384,11 +382,9 @@ def _drop_index( DO NOT USE THIS FUNCTION IN ANY OPERATION THAT TAKES USER INPUT. """ _LOGGER.warning( - ( - "Dropping index `%s` from table `%s`. Note: this can take several " - "minutes on large databases and slow computers. Please " - "be patient!" - ), + "Dropping index `%s` from table `%s`. 
Note: this can take several " + "minutes on large databases and slow computers. Please " + "be patient!", index_name, table_name, ) @@ -397,8 +393,8 @@ def _drop_index( index_to_drop = get_index_by_name(session, table_name, index_name) if index_to_drop is None: - _LOGGER.debug( - "The index %s on table %s no longer exists", index_name, table_name + _LOGGER.warning( + "The index `%s` on table `%s` no longer exists", index_name, table_name ) return @@ -415,18 +411,16 @@ def _drop_index( f"DROP INDEX {index_to_drop}", ): if _execute_or_collect_error(session_maker, query, errors): - _LOGGER.debug( - "Finished dropping index %s from table %s", index_name, table_name + _LOGGER.warning( + "Finished dropping index `%s` from table `%s`", index_name, table_name ) return if not quiet: _LOGGER.warning( - ( - "Failed to drop index `%s` from table `%s`. Schema " - "Migration will continue; this is not a " - "critical operation: %s" - ), + "Failed to drop index `%s` from table `%s`. Schema " + "Migration will continue; this is not a " + "critical operation: %s", index_name, table_name, errors, diff --git a/homeassistant/components/refoss/sensor.py b/homeassistant/components/refoss/sensor.py index 9f5ee5d898a..f65724ddd77 100644 --- a/homeassistant/components/refoss/sensor.py +++ b/homeassistant/components/refoss/sensor.py @@ -91,7 +91,7 @@ SENSORS: dict[str, tuple[RefossSensorEntityDescription, ...]] = { native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_display_precision=2, subkey="mConsume", - fn=lambda x: x if x > 0 else 0, + fn=lambda x: max(0, x), ), RefossSensorEntityDescription( key="energy_returned", diff --git a/homeassistant/components/reolink/media_source.py b/homeassistant/components/reolink/media_source.py index 7a77e482f56..ae865b77913 100644 --- a/homeassistant/components/reolink/media_source.py +++ b/homeassistant/components/reolink/media_source.py @@ -5,6 +5,7 @@ from __future__ import annotations import datetime as dt import logging +from reolink_aio.api import DUAL_LENS_MODELS from reolink_aio.enums import VodRequestType from homeassistant.components.camera import DOMAIN as CAM_DOMAIN, DynamicStreamSettings @@ -184,6 +185,9 @@ class ReolinkVODMediaSource(MediaSource): if device.name_by_user is not None: device_name = device.name_by_user + if host.api.model in DUAL_LENS_MODELS: + device_name = f"{device_name} lens {ch}" + children.append( BrowseMediaSource( domain=DOMAIN, diff --git a/homeassistant/components/ring/strings.json b/homeassistant/components/ring/strings.json index 142c533fcfc..ed0319b7a4b 100644 --- a/homeassistant/components/ring/strings.json +++ b/homeassistant/components/ring/strings.json @@ -95,12 +95,12 @@ }, "issues": { "deprecated_service_ring_update": { - "title": "Detected use of deprecated service `ring.update`", + "title": "Detected use of deprecated action `ring.update`", "fix_flow": { "step": { "confirm": { "title": "[%key:component::ring::issues::deprecated_service_ring_update::title%]", - "description": "Use `homeassistant.update_entity` instead which will update all ring entities.\n\nPlease replace calls to this service and adjust your automations and scripts and select **submit** to close this issue." + "description": "Use `homeassistant.update_entity` instead which will update all ring entities.\n\nPlease replace uses of this action and adjust your automations and scripts and select **submit** to close this issue." 
} } } diff --git a/homeassistant/components/roborock/strings.json b/homeassistant/components/roborock/strings.json index 03ac9f5362e..081e4c68a75 100644 --- a/homeassistant/components/roborock/strings.json +++ b/homeassistant/components/roborock/strings.json @@ -217,7 +217,8 @@ "unknown": "Unknown", "locked": "Locked", "air_drying_stopping": "Air drying stopping", - "egg_attack": "Cupid mode" + "egg_attack": "Cupid mode", + "mapping": "Mapping" } }, "total_cleaning_time": { @@ -326,7 +327,8 @@ "deep": "Deep", "deep_plus": "Deep+", "custom": "Custom", - "fast": "Fast" + "fast": "Fast", + "smart_mode": "SmartPlan" } }, "mop_intensity": { @@ -337,10 +339,12 @@ "mild": "Mild", "medium": "Medium", "moderate": "Moderate", + "max": "Max", "high": "High", "intense": "Intense", "custom": "[%key:component::roborock::entity::select::mop_mode::state::custom%]", - "custom_water_flow": "Custom water flow" + "custom_water_flow": "Custom water flow", + "smart_mode": "[%key:component::roborock::entity::select::mop_mode::state::smart_mode%]" } }, "selected_map": { @@ -385,13 +389,14 @@ "custom": "[%key:component::roborock::entity::select::mop_mode::state::custom%]", "gentle": "Gentle", "off": "[%key:common::state::off%]", - "max": "Max", + "max": "[%key:component::roborock::entity::select::mop_intensity::state::max%]", "max_plus": "Max plus", "medium": "Medium", "quiet": "Quiet", "silent": "Silent", "standard": "[%key:component::roborock::entity::select::mop_mode::state::standard%]", - "turbo": "Turbo" + "turbo": "Turbo", + "smart_mode": "[%key:component::roborock::entity::select::mop_mode::state::smart_mode%]" } } } diff --git a/homeassistant/components/route53/manifest.json b/homeassistant/components/route53/manifest.json index d4ce0d2cc97..6db240bdcab 100644 --- a/homeassistant/components/route53/manifest.json +++ b/homeassistant/components/route53/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/route53", "iot_class": "cloud_push", "loggers": ["boto3", "botocore", "s3transfer"], - "requirements": ["boto3==1.34.51"] + "requirements": ["boto3==1.34.131"] } diff --git a/homeassistant/components/russound_rio/__init__.py b/homeassistant/components/russound_rio/__init__.py index 6d7fe3b1215..1560a4cd332 100644 --- a/homeassistant/components/russound_rio/__init__.py +++ b/homeassistant/components/russound_rio/__init__.py @@ -1 +1,45 @@ """The russound_rio component.""" + +import asyncio +import logging + +from aiorussound import Russound + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PORT, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError + +from .const import CONNECT_TIMEOUT, RUSSOUND_RIO_EXCEPTIONS + +PLATFORMS = [Platform.MEDIA_PLAYER] + +_LOGGER = logging.getLogger(__name__) + +type RussoundConfigEntry = ConfigEntry[Russound] + + +async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> bool: + """Set up a config entry.""" + + russ = Russound(hass.loop, entry.data[CONF_HOST], entry.data[CONF_PORT]) + + try: + async with asyncio.timeout(CONNECT_TIMEOUT): + await russ.connect() + except RUSSOUND_RIO_EXCEPTIONS as err: + raise ConfigEntryError(err) from err + + entry.runtime_data = russ + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await 
hass.config_entries.async_unload_platforms(entry, PLATFORMS): + await entry.runtime_data.close() + + return unload_ok diff --git a/homeassistant/components/russound_rio/config_flow.py b/homeassistant/components/russound_rio/config_flow.py new file mode 100644 index 00000000000..9ad0d25ff94 --- /dev/null +++ b/homeassistant/components/russound_rio/config_flow.py @@ -0,0 +1,114 @@ +"""Config flow to configure russound_rio component.""" + +from __future__ import annotations + +import asyncio +import logging +from typing import Any + +from aiorussound import Russound +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.helpers import config_validation as cv + +from .const import ( + CONNECT_TIMEOUT, + DOMAIN, + RUSSOUND_RIO_EXCEPTIONS, + NoPrimaryControllerException, +) + +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): cv.string, + vol.Optional(CONF_PORT, default=9621): cv.port, + } +) + +_LOGGER = logging.getLogger(__name__) + + +def find_primary_controller_metadata( + controllers: list[tuple[int, str, str]], +) -> tuple[str, str]: + """Find the mac address of the primary Russound controller.""" + for controller_id, mac_address, controller_type in controllers: + # The integration only cares about the primary controller linked by IP and not any downstream controllers + if controller_id == 1: + return (mac_address, controller_type) + raise NoPrimaryControllerException + + +class FlowHandler(ConfigFlow, domain=DOMAIN): + """Russound RIO configuration flow.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initialized by the user.""" + errors: dict[str, str] = {} + if user_input is not None: + host = user_input[CONF_HOST] + port = user_input[CONF_PORT] + + controllers = None + russ = Russound(self.hass.loop, host, port) + try: + async with asyncio.timeout(CONNECT_TIMEOUT): + await russ.connect() + controllers = await russ.enumerate_controllers() + metadata = find_primary_controller_metadata(controllers) + await russ.close() + except RUSSOUND_RIO_EXCEPTIONS: + _LOGGER.exception("Could not connect to Russound RIO") + errors["base"] = "cannot_connect" + except NoPrimaryControllerException: + _LOGGER.exception( + "Russound RIO device doesn't have a primary controller", + ) + errors["base"] = "no_primary_controller" + else: + await self.async_set_unique_id(metadata[0]) + self._abort_if_unique_id_configured() + data = {CONF_HOST: host, CONF_PORT: port} + return self.async_create_entry(title=metadata[1], data=data) + + return self.async_show_form( + step_id="user", data_schema=DATA_SCHEMA, errors=errors + ) + + async def async_step_import( + self, import_config: dict[str, Any] + ) -> ConfigFlowResult: + """Attempt to import the existing configuration.""" + self._async_abort_entries_match({CONF_HOST: import_config[CONF_HOST]}) + host = import_config[CONF_HOST] + port = import_config.get(CONF_PORT, 9621) + + # Connection logic is repeated here since this method will be removed in future releases + russ = Russound(self.hass.loop, host, port) + try: + async with asyncio.timeout(CONNECT_TIMEOUT): + await russ.connect() + controllers = await russ.enumerate_controllers() + metadata = find_primary_controller_metadata(controllers) + await russ.close() + except RUSSOUND_RIO_EXCEPTIONS: + _LOGGER.exception("Could not connect to Russound RIO") + return self.async_abort( + reason="cannot_connect", 
description_placeholders={} + ) + except NoPrimaryControllerException: + _LOGGER.exception("Russound RIO device doesn't have a primary controller") + return self.async_abort( + reason="no_primary_controller", description_placeholders={} + ) + else: + await self.async_set_unique_id(metadata[0]) + self._abort_if_unique_id_configured() + data = {CONF_HOST: host, CONF_PORT: port} + return self.async_create_entry(title=metadata[1], data=data) diff --git a/homeassistant/components/russound_rio/const.py b/homeassistant/components/russound_rio/const.py new file mode 100644 index 00000000000..e5bf81e464a --- /dev/null +++ b/homeassistant/components/russound_rio/const.py @@ -0,0 +1,21 @@ +"""Constants used for Russound RIO.""" + +import asyncio + +from aiorussound import CommandException + +DOMAIN = "russound_rio" + +RUSSOUND_RIO_EXCEPTIONS = ( + CommandException, + ConnectionRefusedError, + TimeoutError, + asyncio.CancelledError, +) + + +class NoPrimaryControllerException(Exception): + """Thrown when the Russound device is not the primary unit in the RNET stack.""" + + +CONNECT_TIMEOUT = 5 diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index 418e9689d83..43cf8e7850f 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -1,7 +1,8 @@ { "domain": "russound_rio", "name": "Russound RIO", - "codeowners": [], + "codeowners": ["@noahhusby"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/russound_rio", "iot_class": "local_push", "loggers": ["aiorussound"], diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index 334fccc08ab..e3eae51eb9e 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -2,34 +2,26 @@ from __future__ import annotations -from aiorussound import Russound -import voluptuous as vol +import logging from homeassistant.components.media_player import ( - PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, MediaPlayerEntity, MediaPlayerEntityFeature, MediaPlayerState, MediaType, ) -from homeassistant.const import ( - CONF_HOST, - CONF_NAME, - CONF_PORT, - EVENT_HOMEASSISTANT_STOP, -) -from homeassistant.core import HomeAssistant, callback -import homeassistant.helpers.config_validation as cv +from homeassistant.config_entries import SOURCE_IMPORT +from homeassistant.const import EVENT_HOMEASSISTANT_STOP +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback +from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_NAME): cv.string, - vol.Optional(CONF_PORT, default=9621): cv.port, - } -) +from . 
import RussoundConfigEntry from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) async def async_setup_platform( @@ -40,22 +32,69 @@ ) -> None: """Set up the Russound RIO platform.""" - host = config.get(CONF_HOST) - port = config.get(CONF_PORT) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=config, + ) + if ( + result["type"] is FlowResultType.CREATE_ENTRY + or result["reason"] == "single_instance_allowed" + ): + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2025.2.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Russound RIO", + }, + ) + return + async_create_issue( + hass, + DOMAIN, + f"deprecated_yaml_import_issue_{result['reason']}", + breaks_in_ha_version="2025.2.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key=f"deprecated_yaml_import_issue_{result['reason']}", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Russound RIO", + }, + ) - russ = Russound(hass.loop, host, port) - await russ.connect() +async def async_setup_entry( + hass: HomeAssistant, + entry: RussoundConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Russound RIO platform.""" + russ = entry.runtime_data # Discover sources and zones sources = await russ.enumerate_sources() valid_zones = await russ.enumerate_zones() - devices = [] + entities = [] for zone_id, name in valid_zones: + if zone_id.controller > 6: + _LOGGER.debug( + "Zone ID %s exceeds RIO controller maximum, skipping", + zone_id.device_str(), + ) + continue await russ.watch_zone(zone_id) - dev = RussoundZoneDevice(russ, zone_id, name, sources) - devices.append(dev) + zone = RussoundZoneDevice(russ, zone_id, name, sources) + entities.append(zone) @callback def on_stop(event): @@ -64,7 +103,7 @@ async def async_setup_platform( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_stop) - async_add_entities(devices) + async_add_entities(entities) class RussoundZoneDevice(MediaPlayerEntity): @@ -80,7 +119,7 @@ class RussoundZoneDevice(MediaPlayerEntity): | MediaPlayerEntityFeature.SELECT_SOURCE ) - def __init__(self, russ, zone_id, name, sources): + def __init__(self, russ, zone_id, name, sources) -> None: """Initialize the zone device.""" super().__init__() self._name = name diff --git a/homeassistant/components/russound_rio/strings.json b/homeassistant/components/russound_rio/strings.json new file mode 100644 index 00000000000..a8b89e3dae3 --- /dev/null +++ b/homeassistant/components/russound_rio/strings.json @@ -0,0 +1,40 @@ +{ + "common": { + "error_cannot_connect": "Failed to connect to Russound device. Please make sure the device is powered up and connected to the network. Try power-cycling the device if it does not connect.", + "error_no_primary_controller": "No primary controller was detected for the Russound device. Please make sure that the target Russound device has its controller ID set to 1 (using the selector on the back of the unit)."
+ }, + "config": { + "step": { + "user": { + "data": { + "host": "[%key:common::config_flow::data::host%]", + "name": "[%key:common::config_flow::data::name%]", + "port": "[%key:common::config_flow::data::port%]" + } + } + }, + "error": { + "cannot_connect": "[%key:component::russound_rio::common::error_cannot_connect%]", + "no_primary_controller": "[%key:component::russound_rio::common::error_no_primary_controller%]" + }, + "abort": { + "cannot_connect": "[%key:component::russound_rio::common::error_cannot_connect%]", + "no_primary_controller": "[%key:component::russound_rio::common::error_no_primary_controller%]", + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "issues": { + "deprecated_yaml_import_issue_cannot_connect": { + "title": "The {integration_title} YAML configuration import cannot connect to the Russound device", + "description": "Configuring {integration_title} using YAML is being removed but there was a connection error importing your YAML configuration.\n\nPlease make sure {integration_title} is turned on, and restart Home Assistant to try importing again. Otherwise, please remove the YAML from your configuration and add the integration manually." + }, + "deprecated_yaml_import_issue_no_primary_controller": { + "title": "The {integration_title} YAML configuration import cannot configure the Russound device.", + "description": "Configuring {integration_title} using YAML is being removed but there was an error importing your YAML configuration.\n\nNo primary controller was detected for the Russound device. Please make sure that the target Russound device has its controller ID set to 1 (using the selector on the back of the unit)." + }, + "deprecated_yaml_import_issue_unknown": { + "title": "[%key:component::russound_rio::issues::deprecated_yaml_import_issue_cannot_connect::title%]", + "description": "[%key:component::russound_rio::issues::deprecated_yaml_import_issue_cannot_connect::description%]" + } + } +} diff --git a/homeassistant/components/scene/strings.json b/homeassistant/components/scene/strings.json index af91b2e227e..3fa750bf4ef 100644 --- a/homeassistant/components/scene/strings.json +++ b/homeassistant/components/scene/strings.json @@ -57,7 +57,7 @@ "message": "{entity_id} is not a valid scene entity_id." }, "entity_not_dynamically_created": { - "message": "The scene {entity_id} is not created with service `scene.create`." + "message": "The scene {entity_id} is not created with action `scene.create`." } } } diff --git a/homeassistant/components/screenlogic/strings.json b/homeassistant/components/screenlogic/strings.json index 755eeb4ffb2..2370d78a6ce 100644 --- a/homeassistant/components/screenlogic/strings.json +++ b/homeassistant/components/screenlogic/strings.json @@ -43,7 +43,7 @@ "fields": { "config_entry": { "name": "Config Entry", - "description": "The config entry to use for this service." + "description": "The config entry to use for this action." }, "color_mode": { "name": "Color Mode", @@ -57,7 +57,7 @@ "fields": { "config_entry": { "name": "Config Entry", - "description": "The config entry to use for this service." + "description": "The config entry to use for this action." }, "runtime": { "name": "Run Time", @@ -71,19 +71,19 @@ "fields": { "config_entry": { "name": "Config Entry", - "description": "The config entry to use for this service." + "description": "The config entry to use for this action."
} } } }, "issues": { "service_target_deprecation": { - "title": "Deprecating use of target for ScreenLogic services", + "title": "Deprecating use of target for ScreenLogic actions", "fix_flow": { "step": { "confirm": { - "title": "Deprecating target for ScreenLogic services", - "description": "Use of an Area, Device, or Entity as a target for ScreenLogic services is being deprecated. Instead, use `config_entry` with the entry_id of the desired ScreenLogic integration.\n\nPlease update your automations and scripts and select **submit** to fix this issue." + "title": "Deprecating target for ScreenLogic actions", + "description": "Use of an Area, Device, or Entity as a target for ScreenLogic actions is being deprecated. Instead, use `config_entry` with the entry_id of the desired ScreenLogic integration.\n\nPlease update your automations and scripts and select **submit** to fix this issue." } } } diff --git a/homeassistant/components/search/__init__.py b/homeassistant/components/search/__init__.py index a85a21e8102..adec8ff1257 100644 --- a/homeassistant/components/search/__init__.py +++ b/homeassistant/components/search/__init__.py @@ -43,6 +43,7 @@ class ItemType(StrEnum): ENTITY = "entity" FLOOR = "floor" GROUP = "group" + INTEGRATION = "integration" LABEL = "label" PERSON = "person" SCENE = "scene" @@ -545,6 +546,9 @@ class Searcher: self._async_resolve_up_area(device_entry.area_id) self._add(ItemType.CONFIG_ENTRY, device_entry.config_entries) + for config_entry_id in device_entry.config_entries: + if entry := self.hass.config_entries.async_get_entry(config_entry_id): + self._add(ItemType.INTEGRATION, entry.domain) return device_entry @@ -573,10 +577,19 @@ class Searcher: self._add(ItemType.DEVICE, entity_entry.device_id) # Add config entry that provided this entity - self._add(ItemType.CONFIG_ENTRY, entity_entry.config_entry_id) + if entity_entry.config_entry_id: + self._add(ItemType.CONFIG_ENTRY, entity_entry.config_entry_id) + + if entry := self.hass.config_entries.async_get_entry( + entity_entry.config_entry_id + ): + # Add integration that provided this entity + self._add(ItemType.INTEGRATION, entry.domain) + elif source := self._entity_sources.get(entity_id): # Add config entry that provided this entity self._add(ItemType.CONFIG_ENTRY, source.get("config_entry")) + self._add(ItemType.INTEGRATION, source["domain"]) return entity_entry diff --git a/homeassistant/components/sensibo/strings.json b/homeassistant/components/sensibo/strings.json index a5f71e53c17..d93c2a54adb 100644 --- a/homeassistant/components/sensibo/strings.json +++ b/homeassistant/components/sensibo/strings.json @@ -499,16 +499,16 @@ "message": "Climate swing mode {swing_mode} is not supported by the integration, please open an issue" }, "service_result_not_true": { - "message": "Could not execute service for {name}" + "message": "Could not perform action for {name}" }, "service_raised": { - "message": "Could not execute service for {name} with error {error}" + "message": "Could not perform action for {name} with error {error}" }, "select_option_not_available": { "message": "Current mode {hvac_mode} doesn't support setting {key}" }, "climate_react_not_available": { - "message": "Use Sensibo Enable Climate React Service once to enable switch or the Sensibo app" + "message": "Use Sensibo Enable Climate React action once to enable switch or the Sensibo app" } } } diff --git a/homeassistant/components/sensor/__init__.py b/homeassistant/components/sensor/__init__.py index 63b853f971e..e7f4b00fd77 100644 --- 
a/homeassistant/components/sensor/__init__.py +++ b/homeassistant/components/sensor/__init__.py @@ -394,11 +394,20 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): suggested_unit_of_measurement = self.suggested_unit_of_measurement if suggested_unit_of_measurement is None: - # Fallback to suggested by the unit conversion rules + # Fallback to unit suggested by the unit conversion rules from device class suggested_unit_of_measurement = self.hass.config.units.get_converted_unit( self.device_class, self.native_unit_of_measurement ) + if suggested_unit_of_measurement is None and ( + unit_converter := UNIT_CONVERTERS.get(self.device_class) + ): + # If the device class is not known by the unit system but has a unit converter, + # fall back to the unit suggested by the unit converter's unit class. + suggested_unit_of_measurement = self.hass.config.units.get_converted_unit( + unit_converter.UNIT_CLASS, self.native_unit_of_measurement + ) + if suggested_unit_of_measurement is None: return UNDEFINED diff --git a/homeassistant/components/sensor/recorder.py b/homeassistant/components/sensor/recorder.py index 940592d7b08..c02c3ce7b7a 100644 --- a/homeassistant/components/sensor/recorder.py +++ b/homeassistant/components/sensor/recorder.py @@ -109,7 +109,7 @@ def _time_weighted_average( for fstate, state in fstates: # The recorder will give us the last known state, which may be well # before the requested start time for the statistics - start_time = start if state.last_updated < start else state.last_updated + start_time = max(state.last_updated, start) if old_start_time is None: # Adjust start time, if there was no last known state start = start_time diff --git a/homeassistant/components/serial/manifest.json b/homeassistant/components/serial/manifest.json index a8bcc335991..cb5dc9ee100 100644 --- a/homeassistant/components/serial/manifest.json +++ b/homeassistant/components/serial/manifest.json @@ -4,5 +4,5 @@ "codeowners": ["@fabaff"], "documentation": "https://www.home-assistant.io/integrations/serial", "iot_class": "local_polling", - "requirements": ["pyserial-asyncio-fast==0.11"] + "requirements": ["pyserial-asyncio-fast==0.13"] } diff --git a/homeassistant/components/seventeentrack/strings.json b/homeassistant/components/seventeentrack/strings.json index d166fb32673..0fbac13736e 100644 --- a/homeassistant/components/seventeentrack/strings.json +++ b/homeassistant/components/seventeentrack/strings.json @@ -52,7 +52,7 @@ "step": { "confirm": { "title": "[%key:component::seventeentrack::issues::deprecate_sensor::title%]", - "description": "17Track package sensors are deprecated and will be removed.\nPlease update your automations and scripts to get data using the `seventeentrack.get_packages` service call." + "description": "17Track package sensors are deprecated and will be removed.\nPlease update your automations and scripts to get data using the `seventeentrack.get_packages` action." 
} } } diff --git a/homeassistant/components/shelly/__init__.py b/homeassistant/components/shelly/__init__.py index 75f66d0bced..6f0f9e9cdbf 100644 --- a/homeassistant/components/shelly/__init__.py +++ b/homeassistant/components/shelly/__init__.py @@ -61,8 +61,10 @@ PLATFORMS: Final = [ Platform.COVER, Platform.EVENT, Platform.LIGHT, + Platform.NUMBER, Platform.SENSOR, Platform.SWITCH, + Platform.TEXT, Platform.UPDATE, Platform.VALVE, ] diff --git a/homeassistant/components/shelly/binary_sensor.py b/homeassistant/components/shelly/binary_sensor.py index bdbf5904b15..bc2ba3326a7 100644 --- a/homeassistant/components/shelly/binary_sensor.py +++ b/homeassistant/components/shelly/binary_sensor.py @@ -8,6 +8,7 @@ from typing import Final, cast from aioshelly.const import RPC_GENERATIONS from homeassistant.components.binary_sensor import ( + DOMAIN as BINARY_SENSOR_PLATFORM, BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, @@ -33,7 +34,9 @@ from .entity import ( async_setup_entry_rpc, ) from .utils import ( + async_remove_orphaned_virtual_entities, get_device_entry_gen, + get_virtual_component_ids, is_block_momentary_input, is_rpc_momentary_input, ) @@ -215,6 +218,11 @@ RPC_SENSORS: Final = { entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, ), + "boolean": RpcBinarySensorDescription( + key="boolean", + sub_key="value", + has_entity_name=True, + ), } @@ -234,9 +242,26 @@ async def async_setup_entry( RpcSleepingBinarySensor, ) else: + coordinator = config_entry.runtime_data.rpc + assert coordinator + async_setup_entry_rpc( hass, config_entry, async_add_entities, RPC_SENSORS, RpcBinarySensor ) + + # the user can remove virtual components from the device configuration, so + # we need to remove orphaned entities + virtual_binary_sensor_ids = get_virtual_component_ids( + coordinator.device.config, BINARY_SENSOR_PLATFORM + ) + async_remove_orphaned_virtual_entities( + hass, + config_entry.entry_id, + coordinator.mac, + BINARY_SENSOR_PLATFORM, + "boolean", + virtual_binary_sensor_ids, + ) return if config_entry.data[CONF_SLEEP_PERIOD]: diff --git a/homeassistant/components/shelly/const.py b/homeassistant/components/shelly/const.py index c5bdb88bbd1..b03452fa41f 100644 --- a/homeassistant/components/shelly/const.py +++ b/homeassistant/components/shelly/const.py @@ -27,6 +27,8 @@ from aioshelly.const import ( MODEL_WALL_DISPLAY, ) +from homeassistant.components.number import NumberMode + DOMAIN: Final = "shelly" LOGGER: Logger = getLogger(__package__) @@ -238,3 +240,16 @@ DEVICES_WITHOUT_FIRMWARE_CHANGELOG = ( CONF_GEN = "gen" SHELLY_PLUS_RGBW_CHANNELS = 4 + +VIRTUAL_COMPONENTS_MAP = { + "binary_sensor": {"types": ["boolean"], "modes": ["label"]}, + "number": {"types": ["number"], "modes": ["field", "slider"]}, + "sensor": {"types": ["number", "text"], "modes": ["label"]}, + "switch": {"types": ["boolean"], "modes": ["toggle"]}, + "text": {"types": ["text"], "modes": ["field"]}, +} + +VIRTUAL_NUMBER_MODE_MAP = { + "field": NumberMode.BOX, + "slider": NumberMode.SLIDER, +} diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index 33ed07c35de..ea9bb4bbabb 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -61,6 +61,7 @@ from .utils import ( async_create_issue_unsupported_firmware, get_block_device_sleep_period, get_device_entry_gen, + get_host, get_http_port, get_rpc_device_wakeup_period, update_device_fw_info, @@ -147,7 +148,7 
@@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice]( model=MODEL_NAMES.get(self.model, self.model), sw_version=self.sw_version, hw_version=f"gen{get_device_entry_gen(self.entry)} ({self.model})", - configuration_url=f"http://{self.entry.data[CONF_HOST]}:{get_http_port(self.entry.data)}", + configuration_url=f"http://{get_host(self.entry.data[CONF_HOST])}:{get_http_port(self.entry.data)}", ) self.device_id = device_entry.id @@ -551,7 +552,7 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): for event_callback in self._event_listeners: event_callback(event) - if event_type == "config_changed": + if event_type in ("component_added", "component_removed", "config_changed"): self.update_sleep_period() LOGGER.info( "Config for %s changed, reloading entry in %s seconds", @@ -739,6 +740,7 @@ class ShellyRpcPollingCoordinator(ShellyCoordinatorBase[RpcDevice]): LOGGER.debug("Polling Shelly RPC Device - %s", self.name) try: await self.device.update_status() + await self.device.get_dynamic_components() except (DeviceConnectionError, RpcCallError) as err: raise UpdateFailed(f"Device disconnected: {err!r}") from err except InvalidAuthError: diff --git a/homeassistant/components/shelly/entity.py b/homeassistant/components/shelly/entity.py index e1530a669a1..24e4f50d47e 100644 --- a/homeassistant/components/shelly/entity.py +++ b/homeassistant/components/shelly/entity.py @@ -291,6 +291,7 @@ class RpcEntityDescription(EntityDescription): extra_state_attributes: Callable[[dict, dict], dict | None] | None = None use_polling_coordinator: bool = False supported: Callable = lambda _: False + unit: Callable[[dict], str | None] | None = None @dataclass(frozen=True) @@ -505,6 +506,13 @@ class ShellyRpcAttributeEntity(ShellyRpcEntity, Entity): self._attr_unique_id = f"{super().unique_id}-{attribute}" self._attr_name = get_rpc_entity_name(coordinator.device, key, description.name) self._last_value = None + id_key = key.split(":")[-1] + self._id = int(id_key) if id_key.isnumeric() else None + + if callable(description.unit): + self._attr_native_unit_of_measurement = description.unit( + coordinator.device.config[key] + ) @property def sub_status(self) -> Any: diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json index 4076f53c28c..1e65a51733d 100644 --- a/homeassistant/components/shelly/manifest.json +++ b/homeassistant/components/shelly/manifest.json @@ -9,7 +9,7 @@ "iot_class": "local_push", "loggers": ["aioshelly"], "quality_scale": "platinum", - "requirements": ["aioshelly==11.0.0"], + "requirements": ["aioshelly==11.1.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/shelly/number.py b/homeassistant/components/shelly/number.py index afc508dd94f..67c33faf150 100644 --- a/homeassistant/components/shelly/number.py +++ b/homeassistant/components/shelly/number.py @@ -2,13 +2,17 @@ from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass -from typing import Any, cast +from typing import TYPE_CHECKING, Any, Final, cast from aioshelly.block_device import Block +from aioshelly.const import RPC_GENERATIONS from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError from homeassistant.components.number import ( + DOMAIN as NUMBER_PLATFORM, + NumberEntity, NumberEntityDescription, NumberExtraStoredData, NumberMode, @@ -20,12 +24,20 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import 
AddEntitiesCallback from homeassistant.helpers.entity_registry import RegistryEntry -from .const import CONF_SLEEP_PERIOD, LOGGER -from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry +from .const import CONF_SLEEP_PERIOD, LOGGER, VIRTUAL_NUMBER_MODE_MAP +from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .entity import ( BlockEntityDescription, + RpcEntityDescription, + ShellyRpcAttributeEntity, ShellySleepingBlockAttributeEntity, async_setup_entry_attribute_entities, + async_setup_entry_rpc, +) +from .utils import ( + async_remove_orphaned_virtual_entities, + get_device_entry_gen, + get_virtual_component_ids, ) @@ -37,6 +49,16 @@ class BlockNumberDescription(BlockEntityDescription, NumberEntityDescription): rest_arg: str = "" +@dataclass(frozen=True, kw_only=True) +class RpcNumberDescription(RpcEntityDescription, NumberEntityDescription): + """Class to describe a RPC number entity.""" + + max_fn: Callable[[dict], float] | None = None + min_fn: Callable[[dict], float] | None = None + step_fn: Callable[[dict], float] | None = None + mode_fn: Callable[[dict], NumberMode] | None = None + + NUMBERS: dict[tuple[str, str], BlockNumberDescription] = { ("device", "valvePos"): BlockNumberDescription( key="device|valvepos", @@ -55,12 +77,54 @@ NUMBERS: dict[tuple[str, str], BlockNumberDescription] = { } +RPC_NUMBERS: Final = { + "number": RpcNumberDescription( + key="number", + sub_key="value", + has_entity_name=True, + max_fn=lambda config: config["max"], + min_fn=lambda config: config["min"], + mode_fn=lambda config: VIRTUAL_NUMBER_MODE_MAP.get( + config["meta"]["ui"]["view"], NumberMode.BOX + ), + step_fn=lambda config: config["meta"]["ui"]["step"], + # If the unit is not set, the device sends an empty string + unit=lambda config: config["meta"]["ui"]["unit"] + if config["meta"]["ui"]["unit"] + else None, + ), +} + + async def async_setup_entry( hass: HomeAssistant, config_entry: ShellyConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up numbers for device.""" + if get_device_entry_gen(config_entry) in RPC_GENERATIONS: + coordinator = config_entry.runtime_data.rpc + assert coordinator + + async_setup_entry_rpc( + hass, config_entry, async_add_entities, RPC_NUMBERS, RpcNumber + ) + + # the user can remove virtual components from the device configuration, so + # we need to remove orphaned entities + virtual_number_ids = get_virtual_component_ids( + coordinator.device.config, NUMBER_PLATFORM + ) + async_remove_orphaned_virtual_entities( + hass, + config_entry.entry_id, + coordinator.mac, + NUMBER_PLATFORM, + "number", + virtual_number_ids, + ) + return + if config_entry.data[CONF_SLEEP_PERIOD]: async_setup_entry_attribute_entities( hass, @@ -126,3 +190,44 @@ class BlockSleepingNumber(ShellySleepingBlockAttributeEntity, RestoreNumber): ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() + + +class RpcNumber(ShellyRpcAttributeEntity, NumberEntity): + """Represent a RPC number entity.""" + + entity_description: RpcNumberDescription + + def __init__( + self, + coordinator: ShellyRpcCoordinator, + key: str, + attribute: str, + description: RpcNumberDescription, + ) -> None: + """Initialize sensor.""" + super().__init__(coordinator, key, attribute, description) + + if callable(description.max_fn): + self._attr_native_max_value = description.max_fn( + coordinator.device.config[key] + ) + if callable(description.min_fn): + self._attr_native_min_value = description.min_fn( + 
coordinator.device.config[key] + ) + if callable(description.step_fn): + self._attr_native_step = description.step_fn(coordinator.device.config[key]) + if callable(description.mode_fn): + self._attr_mode = description.mode_fn(coordinator.device.config[key]) + + @property + def native_value(self) -> float | None: + """Return value of number.""" + if TYPE_CHECKING: + assert isinstance(self.attribute_value, float | None) + + return self.attribute_value + + async def async_set_native_value(self, value: float) -> None: + """Change the value.""" + await self.call_rpc("Number.Set", {"id": self._id, "value": value}) diff --git a/homeassistant/components/shelly/sensor.py b/homeassistant/components/shelly/sensor.py index 5a6f03fd90c..cc782db6bad 100644 --- a/homeassistant/components/shelly/sensor.py +++ b/homeassistant/components/shelly/sensor.py @@ -9,6 +9,7 @@ from aioshelly.block_device import Block from aioshelly.const import RPC_GENERATIONS from homeassistant.components.sensor import ( + DOMAIN as SENSOR_PLATFORM, RestoreSensor, SensorDeviceClass, SensorEntity, @@ -52,8 +53,10 @@ from .entity import ( async_setup_entry_rpc, ) from .utils import ( + async_remove_orphaned_virtual_entities, get_device_entry_gen, get_device_uptime, + get_virtual_component_ids, is_rpc_wifi_stations_disabled, ) @@ -1016,6 +1019,19 @@ RPC_SENSORS: Final = { or status[key].get("xfreq") is None ), ), + "text": RpcSensorDescription( + key="text", + sub_key="value", + has_entity_name=True, + ), + "number": RpcSensorDescription( + key="number", + sub_key="value", + has_entity_name=True, + unit=lambda config: config["meta"]["ui"]["unit"] + if config["meta"]["ui"]["unit"] + else None, + ), } @@ -1035,9 +1051,27 @@ async def async_setup_entry( RpcSleepingSensor, ) else: + coordinator = config_entry.runtime_data.rpc + assert coordinator + async_setup_entry_rpc( hass, config_entry, async_add_entities, RPC_SENSORS, RpcSensor ) + + # the user can remove virtual components from the device configuration, so + # we need to remove orphaned entities + for component in ("text", "number"): + virtual_component_ids = get_virtual_component_ids( + coordinator.device.config, SENSOR_PLATFORM + ) + async_remove_orphaned_virtual_entities( + hass, + config_entry.entry_id, + coordinator.mac, + SENSOR_PLATFORM, + component, + virtual_component_ids, + ) return if config_entry.data[CONF_SLEEP_PERIOD]: diff --git a/homeassistant/components/shelly/strings.json b/homeassistant/components/shelly/strings.json index 3a71874f2dd..8ae4ff1f3e4 100644 --- a/homeassistant/components/shelly/strings.json +++ b/homeassistant/components/shelly/strings.json @@ -176,14 +176,6 @@ "title": "Shelly device {device_name} push update failure", "description": "Home Assistant is not receiving push updates from the Shelly device {device_name} with IP address {ip_address}. Check the CoIoT configuration in the web panel of the device and your network configuration." }, - "deprecated_valve_switch": { - "title": "The switch entity for Shelly Gas Valve is deprecated", - "description": "The switch entity for Shelly Gas Valve is deprecated. A valve entity {entity} is available and should be used going forward. For this new valve entity you need to use {service} service." - }, - "deprecated_valve_switch_entity": { - "title": "Deprecated switch entity for Shelly Gas Valve detected in {info}", - "description": "Your Shelly Gas Valve entity `{entity}` is being used in `{info}`. A valve entity is available and should be used going forward.\n\nPlease adjust `{info}` to fix this issue." 
- }, "unsupported_firmware": { "title": "Unsupported firmware for device {device_name}", "description": "Your Shelly device {device_name} with IP address {ip_address} is running an unsupported firmware. Please update the firmware.\n\nIf the device does not offer an update, check internet connectivity (gateway, DNS, time) and restart the device." diff --git a/homeassistant/components/shelly/switch.py b/homeassistant/components/shelly/switch.py index 09ee133589b..2b9b1cadc69 100644 --- a/homeassistant/components/shelly/switch.py +++ b/homeassistant/components/shelly/switch.py @@ -8,7 +8,11 @@ from typing import Any, cast from aioshelly.block_device import Block from aioshelly.const import MODEL_2, MODEL_25, MODEL_WALL_DISPLAY, RPC_GENERATIONS -from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.components.switch import ( + DOMAIN as SWITCH_PLATFORM, + SwitchEntity, + SwitchEntityDescription, +) from homeassistant.const import STATE_ON, EntityCategory from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -19,15 +23,20 @@ from .const import CONF_SLEEP_PERIOD, MOTION_MODELS from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .entity import ( BlockEntityDescription, + RpcEntityDescription, ShellyBlockEntity, + ShellyRpcAttributeEntity, ShellyRpcEntity, ShellySleepingBlockAttributeEntity, async_setup_entry_attribute_entities, + async_setup_rpc_attribute_entities, ) from .utils import ( + async_remove_orphaned_virtual_entities, async_remove_shelly_entity, get_device_entry_gen, get_rpc_key_ids, + get_virtual_component_ids, is_block_channel_type_light, is_rpc_channel_type_light, is_rpc_thermostat_internal_actuator, @@ -47,6 +56,17 @@ MOTION_SWITCH = BlockSwitchDescription( ) +@dataclass(frozen=True, kw_only=True) +class RpcSwitchDescription(RpcEntityDescription, SwitchEntityDescription): + """Class to describe a RPC virtual switch.""" + + +RPC_VIRTUAL_SWITCH = RpcSwitchDescription( + key="boolean", + sub_key="value", +) + + async def async_setup_entry( hass: HomeAssistant, config_entry: ShellyConfigEntry, @@ -148,6 +168,28 @@ def async_setup_rpc_entry( unique_id = f"{coordinator.mac}-switch:{id_}" async_remove_shelly_entity(hass, "light", unique_id) + async_setup_rpc_attribute_entities( + hass, + config_entry, + async_add_entities, + {"boolean": RPC_VIRTUAL_SWITCH}, + RpcVirtualSwitch, + ) + + # the user can remove virtual components from the device configuration, so we need + # to remove orphaned entities + virtual_switch_ids = get_virtual_component_ids( + coordinator.device.config, SWITCH_PLATFORM + ) + async_remove_orphaned_virtual_entities( + hass, + config_entry.entry_id, + coordinator.mac, + SWITCH_PLATFORM, + "boolean", + virtual_switch_ids, + ) + if not switch_ids: return @@ -255,3 +297,23 @@ class RpcRelaySwitch(ShellyRpcEntity, SwitchEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Turn off relay.""" await self.call_rpc("Switch.Set", {"id": self._id, "on": False}) + + +class RpcVirtualSwitch(ShellyRpcAttributeEntity, SwitchEntity): + """Entity that controls a virtual boolean component on RPC based Shelly devices.""" + + entity_description: RpcSwitchDescription + _attr_has_entity_name = True + + @property + def is_on(self) -> bool: + """If switch is on.""" + return bool(self.attribute_value) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on relay.""" + await 
self.call_rpc("Boolean.Set", {"id": self._id, "value": True}) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off relay.""" + await self.call_rpc("Boolean.Set", {"id": self._id, "value": False}) diff --git a/homeassistant/components/shelly/text.py b/homeassistant/components/shelly/text.py new file mode 100644 index 00000000000..ec290def45d --- /dev/null +++ b/homeassistant/components/shelly/text.py @@ -0,0 +1,89 @@ +"""Text for Shelly.""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import TYPE_CHECKING, Final + +from aioshelly.const import RPC_GENERATIONS + +from homeassistant.components.text import ( + DOMAIN as TEXT_PLATFORM, + TextEntity, + TextEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import ShellyConfigEntry +from .entity import ( + RpcEntityDescription, + ShellyRpcAttributeEntity, + async_setup_entry_rpc, +) +from .utils import ( + async_remove_orphaned_virtual_entities, + get_device_entry_gen, + get_virtual_component_ids, +) + + +@dataclass(frozen=True, kw_only=True) +class RpcTextDescription(RpcEntityDescription, TextEntityDescription): + """Class to describe a RPC text entity.""" + + +RPC_TEXT_ENTITIES: Final = { + "text": RpcTextDescription( + key="text", + sub_key="value", + has_entity_name=True, + ), +} + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ShellyConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up sensors for device.""" + if get_device_entry_gen(config_entry) in RPC_GENERATIONS: + coordinator = config_entry.runtime_data.rpc + assert coordinator + + async_setup_entry_rpc( + hass, config_entry, async_add_entities, RPC_TEXT_ENTITIES, RpcText + ) + + # the user can remove virtual components from the device configuration, so + # we need to remove orphaned entities + virtual_text_ids = get_virtual_component_ids( + coordinator.device.config, TEXT_PLATFORM + ) + async_remove_orphaned_virtual_entities( + hass, + config_entry.entry_id, + coordinator.mac, + TEXT_PLATFORM, + "text", + virtual_text_ids, + ) + + +class RpcText(ShellyRpcAttributeEntity, TextEntity): + """Represent a RPC text entity.""" + + entity_description: RpcTextDescription + + @property + def native_value(self) -> str | None: + """Return value of sensor.""" + if TYPE_CHECKING: + assert isinstance(self.attribute_value, str | None) + + return self.attribute_value + + async def async_set_value(self, value: str) -> None: + """Change the value.""" + await self.call_rpc("Text.Set", {"id": self._id, "value": value}) diff --git a/homeassistant/components/shelly/utils.py b/homeassistant/components/shelly/utils.py index bcd5a859538..4db5f9badbb 100644 --- a/homeassistant/components/shelly/utils.py +++ b/homeassistant/components/shelly/utils.py @@ -3,7 +3,8 @@ from __future__ import annotations from datetime import datetime, timedelta -from ipaddress import IPv4Address +from ipaddress import IPv4Address, IPv6Address, ip_address +import re from types import MappingProxyType from typing import Any, cast @@ -52,6 +53,7 @@ from .const import ( SHBTN_MODELS, SHIX3_1_INPUTS_EVENTS_TYPES, UPTIME_DEVIATION, + VIRTUAL_COMPONENTS_MAP, ) @@ -321,6 +323,8 @@ def get_rpc_channel_name(device: RpcDevice, key: str) -> str: return f"{device_name} {key.replace(':', '_')}" if key.startswith("em1"): return f"{device_name} EM{key.split(':')[-1]}" + if key.startswith(("boolean:", "number:", "text:")): + return 
key.replace(":", " ").title() return device_name return entity_name @@ -482,6 +486,20 @@ def get_http_port(data: MappingProxyType[str, Any]) -> int: return cast(int, data.get(CONF_PORT, DEFAULT_HTTP_PORT)) +def get_host(host: str) -> str: + """Get the device IP address or hostname.""" + try: + ip_object = ip_address(host) + except ValueError: + # host contains hostname + return host + + if isinstance(ip_object, IPv6Address): + return f"[{host}]" + + return host + + @callback def async_remove_shelly_rpc_entities( hass: HomeAssistant, domain: str, mac: str, keys: list[str] @@ -497,3 +515,59 @@ def async_remove_shelly_rpc_entities( def is_rpc_thermostat_mode(ident: int, status: dict[str, Any]) -> bool: """Return True if 'thermostat:' is present in the status.""" return f"thermostat:{ident}" in status + + +def get_virtual_component_ids(config: dict[str, Any], platform: str) -> list[str]: + """Return a list of virtual component IDs for a platform.""" + component = VIRTUAL_COMPONENTS_MAP.get(platform) + + if not component: + return [] + + ids: list[str] = [] + + for comp_type in component["types"]: + ids.extend( + k + for k, v in config.items() + if k.startswith(comp_type) and v["meta"]["ui"]["view"] in component["modes"] + ) + + return ids + + +@callback +def async_remove_orphaned_virtual_entities( + hass: HomeAssistant, + config_entry_id: str, + mac: str, + platform: str, + virt_comp_type: str, + virt_comp_ids: list[str], +) -> None: + """Remove orphaned virtual entities.""" + orphaned_entities = [] + entity_reg = er.async_get(hass) + device_reg = dr.async_get(hass) + + if not ( + devices := device_reg.devices.get_devices_for_config_entry_id(config_entry_id) + ): + return + + device_id = devices[0].id + entities = er.async_entries_for_device(entity_reg, device_id, True) + for entity in entities: + if not entity.entity_id.startswith(platform): + continue + if virt_comp_type not in entity.unique_id: + continue + # we are looking for the component ID, e.g. 
boolean:201 + if not (match := re.search(r"[a-z]+:\d+", entity.unique_id)): + continue + virt_comp_id = match.group() + if virt_comp_id not in virt_comp_ids: + orphaned_entities.append(f"{virt_comp_id}-{virt_comp_type}") + + if orphaned_entities: + async_remove_shelly_rpc_entities(hass, platform, mac, orphaned_entities) diff --git a/homeassistant/components/sunweg/manifest.json b/homeassistant/components/sunweg/manifest.json index bcf1ad9dae2..998d3610735 100644 --- a/homeassistant/components/sunweg/manifest.json +++ b/homeassistant/components/sunweg/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/sunweg/", "iot_class": "cloud_polling", "loggers": ["sunweg"], - "requirements": ["sunweg==3.0.1"] + "requirements": ["sunweg==3.0.2"] } diff --git a/homeassistant/components/supla/cover.py b/homeassistant/components/supla/cover.py index 4cdee04b149..37b64c375eb 100644 --- a/homeassistant/components/supla/cover.py +++ b/homeassistant/components/supla/cover.py @@ -71,7 +71,9 @@ class SuplaCoverEntity(SuplaEntity, CoverEntity): async def async_set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" - await self.async_action("REVEAL", percentage=kwargs.get(ATTR_POSITION)) + await self.async_action( + "REVEAL_PARTIALLY", percentage=kwargs.get(ATTR_POSITION) + ) @property def is_closed(self) -> bool | None: diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index dc858a688cb..0cbbd70a805 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.48.0"] + "requirements": ["PySwitchbot==0.48.1"] } diff --git a/homeassistant/components/synology_dsm/strings.json b/homeassistant/components/synology_dsm/strings.json index 4ed06119577..0f8ea594732 100644 --- a/homeassistant/components/synology_dsm/strings.json +++ b/homeassistant/components/synology_dsm/strings.json @@ -173,7 +173,7 @@ "services": { "reboot": { "name": "Reboot", - "description": "Reboots the NAS. This service is deprecated and will be removed in future release. Please use the corresponding button entity.", + "description": "Reboots the NAS. This action is deprecated and will be removed in future release. Please use the corresponding button entity.", "fields": { "serial": { "name": "Serial", @@ -183,7 +183,7 @@ }, "shutdown": { "name": "Shutdown", - "description": "Shutdowns the NAS. This service is deprecated and will be removed in future release. Please use the corresponding button entity.", + "description": "Shutdowns the NAS. This action is deprecated and will be removed in future release. 
Please use the corresponding button entity.", "fields": { "serial": { "name": "[%key:component::synology_dsm::services::reboot::fields::serial::name%]", diff --git a/homeassistant/components/tedee/manifest.json b/homeassistant/components/tedee/manifest.json index 24df4cff95c..4f071267a25 100644 --- a/homeassistant/components/tedee/manifest.json +++ b/homeassistant/components/tedee/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["pytedee_async"], "quality_scale": "platinum", - "requirements": ["pytedee-async==0.2.17"] + "requirements": ["pytedee-async==0.2.20"] } diff --git a/homeassistant/components/template/config_flow.py b/homeassistant/components/template/config_flow.py index 5a5527cc7c5..c52a890c1f7 100644 --- a/homeassistant/components/template/config_flow.py +++ b/homeassistant/components/template/config_flow.py @@ -24,6 +24,9 @@ from homeassistant.const import ( CONF_NAME, CONF_STATE, CONF_UNIT_OF_MEASUREMENT, + CONF_URL, + CONF_VALUE_TEMPLATE, + CONF_VERIFY_SSL, Platform, ) from homeassistant.core import HomeAssistant, callback @@ -37,8 +40,10 @@ from homeassistant.helpers.schema_config_entry_flow import ( ) from .binary_sensor import async_create_preview_binary_sensor -from .const import CONF_PRESS, DOMAIN +from .const import CONF_PRESS, CONF_TURN_OFF, CONF_TURN_ON, DOMAIN +from .select import CONF_OPTIONS, CONF_SELECT_OPTION from .sensor import async_create_preview_sensor +from .switch import async_create_preview_switch from .template_entity import TemplateEntity _SCHEMA_STATE: dict[vol.Marker, Any] = { @@ -83,6 +88,18 @@ def generate_schema(domain: str, flow_type: str) -> vol.Schema: ) } + if domain == Platform.IMAGE: + schema |= { + vol.Required(CONF_URL): selector.TemplateSelector(), + vol.Optional(CONF_VERIFY_SSL, default=True): selector.BooleanSelector(), + } + + if domain == Platform.SELECT: + schema |= _SCHEMA_STATE | { + vol.Required(CONF_OPTIONS): selector.TemplateSelector(), + vol.Optional(CONF_SELECT_OPTION): selector.ActionSelector(), + } + if domain == Platform.SENSOR: schema |= _SCHEMA_STATE | { vol.Optional(CONF_UNIT_OF_MEASUREMENT): selector.SelectSelector( @@ -123,6 +140,13 @@ def generate_schema(domain: str, flow_type: str) -> vol.Schema: ), } + if domain == Platform.SWITCH: + schema |= { + vol.Optional(CONF_VALUE_TEMPLATE): selector.TemplateSelector(), + vol.Optional(CONF_TURN_ON): selector.ActionSelector(), + vol.Optional(CONF_TURN_OFF): selector.ActionSelector(), + } + schema[vol.Optional(CONF_DEVICE_ID)] = selector.DeviceSelector() return vol.Schema(schema) @@ -213,7 +237,10 @@ def validate_user_input( TEMPLATE_TYPES = [ "binary_sensor", "button", + "image", + "select", "sensor", + "switch", ] CONFIG_FLOW = { @@ -227,11 +254,24 @@ CONFIG_FLOW = { config_schema(Platform.BUTTON), validate_user_input=validate_user_input(Platform.BUTTON), ), + Platform.IMAGE: SchemaFlowFormStep( + config_schema(Platform.IMAGE), + validate_user_input=validate_user_input(Platform.IMAGE), + ), + Platform.SELECT: SchemaFlowFormStep( + config_schema(Platform.SELECT), + validate_user_input=validate_user_input(Platform.SELECT), + ), Platform.SENSOR: SchemaFlowFormStep( config_schema(Platform.SENSOR), preview="template", validate_user_input=validate_user_input(Platform.SENSOR), ), + Platform.SWITCH: SchemaFlowFormStep( + config_schema(Platform.SWITCH), + preview="template", + validate_user_input=validate_user_input(Platform.SWITCH), + ), } @@ -246,11 +286,24 @@ OPTIONS_FLOW = { options_schema(Platform.BUTTON), 
validate_user_input=validate_user_input(Platform.BUTTON), ), + Platform.IMAGE: SchemaFlowFormStep( + options_schema(Platform.IMAGE), + validate_user_input=validate_user_input(Platform.IMAGE), + ), + Platform.SELECT: SchemaFlowFormStep( + options_schema(Platform.SELECT), + validate_user_input=validate_user_input(Platform.SELECT), + ), Platform.SENSOR: SchemaFlowFormStep( options_schema(Platform.SENSOR), preview="template", validate_user_input=validate_user_input(Platform.SENSOR), ), + Platform.SWITCH: SchemaFlowFormStep( + options_schema(Platform.SWITCH), + preview="template", + validate_user_input=validate_user_input(Platform.SWITCH), + ), } CREATE_PREVIEW_ENTITY: dict[ @@ -259,6 +312,7 @@ CREATE_PREVIEW_ENTITY: dict[ ] = { "binary_sensor": async_create_preview_binary_sensor, "sensor": async_create_preview_sensor, + "switch": async_create_preview_switch, } diff --git a/homeassistant/components/template/const.py b/homeassistant/components/template/const.py index e7681225a49..8b4e46ba383 100644 --- a/homeassistant/components/template/const.py +++ b/homeassistant/components/template/const.py @@ -34,3 +34,5 @@ CONF_ATTRIBUTE_TEMPLATES = "attribute_templates" CONF_PICTURE = "picture" CONF_PRESS = "press" CONF_OBJECT_ID = "object_id" +CONF_TURN_OFF = "turn_off" +CONF_TURN_ON = "turn_on" diff --git a/homeassistant/components/template/image.py b/homeassistant/components/template/image.py index 92f0fe7b9fa..ba85418c339 100644 --- a/homeassistant/components/template/image.py +++ b/homeassistant/components/template/image.py @@ -8,10 +8,18 @@ from typing import Any import voluptuous as vol from homeassistant.components.image import DOMAIN as IMAGE_DOMAIN, ImageEntity -from homeassistant.const import CONF_UNIQUE_ID, CONF_URL, CONF_VERIFY_SSL +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + CONF_DEVICE_ID, + CONF_NAME, + CONF_UNIQUE_ID, + CONF_URL, + CONF_VERIFY_SSL, +) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import TemplateError -import homeassistant.helpers.config_validation as cv +from homeassistant.helpers import config_validation as cv, selector +from homeassistant.helpers.device import async_device_info_to_link_from_device_id from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import dt as dt_util @@ -35,6 +43,16 @@ IMAGE_SCHEMA = vol.Schema( ).extend(make_template_entity_common_schema(DEFAULT_NAME).schema) +IMAGE_CONFIG_SCHEMA = vol.Schema( + { + vol.Optional(CONF_NAME): cv.template, + vol.Required(CONF_URL): cv.template, + vol.Optional(CONF_VERIFY_SSL, default=True): bool, + vol.Optional(CONF_DEVICE_ID): selector.DeviceSelector(), + } +) + + async def _async_create_entities( hass: HomeAssistant, definitions: list[dict[str, Any]], unique_id_prefix: str | None ) -> list[StateImageEntity]: @@ -75,6 +93,20 @@ async def async_setup_platform( ) +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Initialize config entry.""" + _options = dict(config_entry.options) + _options.pop("template_type") + validated_config = IMAGE_CONFIG_SCHEMA(_options) + async_add_entities( + [StateImageEntity(hass, validated_config, config_entry.entry_id)] + ) + + class StateImageEntity(TemplateEntity, ImageEntity): """Representation of a template image.""" @@ -91,6 +123,10 @@ class StateImageEntity(TemplateEntity, ImageEntity): 
TemplateEntity.__init__(self, hass, config=config, unique_id=unique_id) ImageEntity.__init__(self, hass, config[CONF_VERIFY_SSL]) self._url_template = config[CONF_URL] + self._attr_device_info = async_device_info_to_link_from_device_id( + hass, + config.get(CONF_DEVICE_ID), + ) @property def entity_picture(self) -> str | None: diff --git a/homeassistant/components/template/select.py b/homeassistant/components/template/select.py index 650b236faee..bd37ca1015c 100644 --- a/homeassistant/components/template/select.py +++ b/homeassistant/components/template/select.py @@ -13,9 +13,17 @@ from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SelectEntity, ) -from homeassistant.const import CONF_NAME, CONF_OPTIMISTIC, CONF_STATE, CONF_UNIQUE_ID +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + CONF_DEVICE_ID, + CONF_NAME, + CONF_OPTIMISTIC, + CONF_STATE, + CONF_UNIQUE_ID, +) from homeassistant.core import HomeAssistant, callback -import homeassistant.helpers.config_validation as cv +from homeassistant.helpers import config_validation as cv, selector +from homeassistant.helpers.device import async_device_info_to_link_from_device_id from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -31,6 +39,7 @@ from .trigger_entity import TriggerEntity _LOGGER = logging.getLogger(__name__) +CONF_OPTIONS = "options" CONF_SELECT_OPTION = "select_option" DEFAULT_NAME = "Template Select" @@ -52,6 +61,17 @@ SELECT_SCHEMA = ( ) +SELECT_CONFIG_SCHEMA = vol.Schema( + { + vol.Required(CONF_NAME): cv.template, + vol.Required(CONF_STATE): cv.template, + vol.Required(CONF_OPTIONS): cv.template, + vol.Optional(CONF_SELECT_OPTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_DEVICE_ID): selector.DeviceSelector(), + } +) + + async def _async_create_entities( hass: HomeAssistant, definitions: list[dict[str, Any]], unique_id_prefix: str | None ) -> list[TemplateSelect]: @@ -92,6 +112,18 @@ async def async_setup_platform( ) +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Initialize config entry.""" + _options = dict(config_entry.options) + _options.pop("template_type") + validated_config = SELECT_CONFIG_SCHEMA(_options) + async_add_entities([TemplateSelect(hass, validated_config, config_entry.entry_id)]) + + class TemplateSelect(TemplateEntity, SelectEntity): """Representation of a template select.""" @@ -107,13 +139,18 @@ class TemplateSelect(TemplateEntity, SelectEntity): super().__init__(hass, config=config, unique_id=unique_id) assert self._attr_name is not None self._value_template = config[CONF_STATE] - self._command_select_option = Script( - hass, config[CONF_SELECT_OPTION], self._attr_name, DOMAIN - ) + if (selection_option := config.get(CONF_SELECT_OPTION)) is not None: + self._command_select_option = Script( + hass, selection_option, self._attr_name, DOMAIN + ) self._options_template = config[ATTR_OPTIONS] - self._attr_assumed_state = self._optimistic = config[CONF_OPTIMISTIC] + self._attr_assumed_state = self._optimistic = config.get(CONF_OPTIMISTIC, False) self._attr_options = [] self._attr_current_option = None + self._attr_device_info = async_device_info_to_link_from_device_id( + hass, + config.get(CONF_DEVICE_ID), + ) @callback def _async_setup_templates(self) -> None: @@ -137,11 +174,12 @@ class TemplateSelect(TemplateEntity, 
SelectEntity): if self._optimistic: self._attr_current_option = option self.async_write_ha_state() - await self.async_run_script( - self._command_select_option, - run_variables={ATTR_OPTION: option}, - context=self._context, - ) + if self._command_select_option: + await self.async_run_script( + self._command_select_option, + run_variables={ATTR_OPTION: option}, + context=self._context, + ) class TriggerSelectEntity(TriggerEntity, SelectEntity): diff --git a/homeassistant/components/template/strings.json b/homeassistant/components/template/strings.json index df281b77daa..f004c342eab 100644 --- a/homeassistant/components/template/strings.json +++ b/homeassistant/components/template/strings.json @@ -25,6 +25,31 @@ }, "title": "Template button" }, + "image": { + "data": { + "device_id": "[%key:common::config_flow::data::device%]", + "name": "[%key:common::config_flow::data::name%]", + "url": "[%key:common::config_flow::data::url%]", + "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + }, + "data_description": { + "device_id": "[%key:component::template::config::step::sensor::data_description::device_id%]" + }, + "title": "Template image" + }, + "select": { + "data": { + "device_id": "[%key:common::config_flow::data::device%]", + "name": "[%key:common::config_flow::data::name%]", + "state": "[%key:component::template::config::step::sensor::data::state%]", + "select_option": "Actions on select", + "options": "Available options" + }, + "data_description": { + "device_id": "[%key:component::template::config::step::sensor::data_description::device_id%]" + }, + "title": "Template select" + }, "sensor": { "data": { "device_id": "[%key:common::config_flow::data::device%]", @@ -44,9 +69,25 @@ "menu_options": { "binary_sensor": "Template a binary sensor", "button": "Template a button", - "sensor": "Template a sensor" + "image": "Template a image", + "select": "Template a select", + "sensor": "Template a sensor", + "switch": "Template a switch" }, "title": "Template helper" + }, + "switch": { + "data": { + "device_id": "[%key:common::config_flow::data::device%]", + "name": "[%key:common::config_flow::data::name%]", + "turn_off": "Actions on turn off", + "turn_on": "Actions on turn on", + "value_template": "Value template" + }, + "data_description": { + "device_id": "[%key:component::template::config::step::sensor::data_description::device_id%]" + }, + "title": "Template switch" } } }, @@ -72,6 +113,30 @@ }, "title": "[%key:component::template::config::step::button::title%]" }, + "image": { + "data": { + "device_id": "[%key:common::config_flow::data::device%]", + "url": "[%key:common::config_flow::data::url%]", + "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + }, + "data_description": { + "device_id": "[%key:component::template::config::step::sensor::data_description::device_id%]" + }, + "title": "[%key:component::template::config::step::image::title%]" + }, + "select": { + "data": { + "device_id": "[%key:common::config_flow::data::device%]", + "name": "[%key:common::config_flow::data::name%]", + "state": "[%key:component::template::config::step::sensor::data::state%]", + "select_option": "[%key:component::template::config::step::select::data::select_option%]", + "options": "[%key:component::template::config::step::select::data::options%]" + }, + "data_description": { + "device_id": "[%key:component::template::config::step::sensor::data_description::device_id%]" + }, + "title": "[%key:component::template::config::step::select::title%]" + }, "sensor": { "data": { "device_id": 
"[%key:common::config_flow::data::device%]", @@ -84,6 +149,19 @@ "device_id": "[%key:component::template::config::step::sensor::data_description::device_id%]" }, "title": "[%key:component::template::config::step::sensor::title%]" + }, + "switch": { + "data": { + "device_id": "[%key:common::config_flow::data::device%]", + "name": "[%key:common::config_flow::data::name%]", + "value_template": "[%key:component::template::config::step::switch::data::value_template%]", + "turn_off": "[%key:component::template::config::step::switch::data::turn_off%]", + "turn_on": "[%key:component::template::config::step::switch::data::turn_on%]" + }, + "data_description": { + "device_id": "[%key:component::template::config::step::sensor::data_description::device_id%]" + }, + "title": "[%key:component::template::config::step::switch::title%]" } } }, diff --git a/homeassistant/components/template/switch.py b/homeassistant/components/template/switch.py index 3a7cfcde0f7..fbb35399ef8 100644 --- a/homeassistant/components/template/switch.py +++ b/homeassistant/components/template/switch.py @@ -11,9 +11,12 @@ from homeassistant.components.switch import ( PLATFORM_SCHEMA as SWITCH_PLATFORM_SCHEMA, SwitchEntity, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, + CONF_DEVICE_ID, + CONF_NAME, CONF_SWITCHES, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, @@ -22,14 +25,15 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import TemplateError -import homeassistant.helpers.config_validation as cv +from homeassistant.helpers import config_validation as cv, selector +from homeassistant.helpers.device import async_device_info_to_link_from_device_id from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .const import DOMAIN +from .const import CONF_TURN_OFF, CONF_TURN_ON, DOMAIN from .template_entity import ( TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY, TemplateEntity, @@ -38,16 +42,13 @@ from .template_entity import ( _VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"] -ON_ACTION = "turn_on" -OFF_ACTION = "turn_off" - SWITCH_SCHEMA = vol.All( cv.deprecated(ATTR_ENTITY_ID), vol.Schema( { vol.Optional(CONF_VALUE_TEMPLATE): cv.template, - vol.Required(ON_ACTION): cv.SCRIPT_SCHEMA, - vol.Required(OFF_ACTION): cv.SCRIPT_SCHEMA, + vol.Required(CONF_TURN_ON): cv.SCRIPT_SCHEMA, + vol.Required(CONF_TURN_OFF): cv.SCRIPT_SCHEMA, vol.Optional(ATTR_FRIENDLY_NAME): cv.string, vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, vol.Optional(CONF_UNIQUE_ID): cv.string, @@ -59,6 +60,16 @@ PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( {vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(SWITCH_SCHEMA)} ) +SWITCH_CONFIG_SCHEMA = vol.Schema( + { + vol.Required(CONF_NAME): cv.template, + vol.Optional(CONF_VALUE_TEMPLATE): cv.template, + vol.Optional(CONF_TURN_ON): selector.ActionSelector(), + vol.Optional(CONF_TURN_OFF): selector.ActionSelector(), + vol.Optional(CONF_DEVICE_ID): selector.DeviceSelector(), + } +) + async def _async_create_entities(hass, config): """Create the Template switches.""" @@ -90,6 +101,29 @@ async def async_setup_platform( async_add_entities(await _async_create_entities(hass, config)) +async def async_setup_entry( + hass: 
HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Initialize config entry.""" + _options = dict(config_entry.options) + _options.pop("template_type") + validated_config = SWITCH_CONFIG_SCHEMA(_options) + async_add_entities( + [SwitchTemplate(hass, None, validated_config, config_entry.entry_id)] + ) + + +@callback +def async_create_preview_switch( + hass: HomeAssistant, name: str, config: dict[str, Any] +) -> SwitchTemplate: + """Create a preview switch.""" + validated_config = SWITCH_CONFIG_SCHEMA(config | {CONF_NAME: name}) + return SwitchTemplate(hass, None, validated_config, None) + + class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity): """Representation of a Template switch.""" @@ -106,15 +140,28 @@ class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity): super().__init__( hass, config=config, fallback_name=object_id, unique_id=unique_id ) - self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, object_id, hass=hass - ) + if object_id is not None: + self.entity_id = async_generate_entity_id( + ENTITY_ID_FORMAT, object_id, hass=hass + ) friendly_name = self._attr_name self._template = config.get(CONF_VALUE_TEMPLATE) - self._on_script = Script(hass, config[ON_ACTION], friendly_name, DOMAIN) - self._off_script = Script(hass, config[OFF_ACTION], friendly_name, DOMAIN) + self._on_script = ( + Script(hass, config.get(CONF_TURN_ON), friendly_name, DOMAIN) + if config.get(CONF_TURN_ON) is not None + else None + ) + self._off_script = ( + Script(hass, config.get(CONF_TURN_OFF), friendly_name, DOMAIN) + if config.get(CONF_TURN_OFF) is not None + else None + ) self._state: bool | None = False self._attr_assumed_state = self._template is None + self._attr_device_info = async_device_info_to_link_from_device_id( + hass, + config.get(CONF_DEVICE_ID), + ) @callback def _update_state(self, result): @@ -159,14 +206,16 @@ class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Fire the on action.""" - await self.async_run_script(self._on_script, context=self._context) + if self._on_script: + await self.async_run_script(self._on_script, context=self._context) if self._template is None: self._state = True self.async_write_ha_state() async def async_turn_off(self, **kwargs: Any) -> None: """Fire the off action.""" - await self.async_run_script(self._off_script, context=self._context) + if self._off_script: + await self.async_run_script(self._off_script, context=self._context) if self._template is None: self._state = False self.async_write_ha_state() diff --git a/homeassistant/components/tessie/const.py b/homeassistant/components/tessie/const.py index bdb20193613..90862eff969 100644 --- a/homeassistant/components/tessie/const.py +++ b/homeassistant/components/tessie/const.py @@ -38,6 +38,15 @@ class TessieSeatHeaterOptions(StrEnum): HIGH = "high" +class TessieSeatCoolerOptions(StrEnum): + """Tessie seat cooler options.""" + + OFF = "off" + LOW = "low" + MEDIUM = "medium" + HIGH = "high" + + class TessieClimateKeeper(StrEnum): """Tessie Climate Keeper Modes.""" diff --git a/homeassistant/components/tessie/cover.py b/homeassistant/components/tessie/cover.py index 6fdd950b809..109bdbce2bf 100644 --- a/homeassistant/components/tessie/cover.py +++ b/homeassistant/components/tessie/cover.py @@ -2,14 +2,17 @@ from __future__ import annotations +from itertools import chain from typing import Any from tessie_api import ( close_charge_port, + close_sunroof, 
close_windows, open_close_rear_trunk, open_front_trunk, open_unlock_charge_port, + vent_sunroof, vent_windows, ) @@ -36,14 +39,25 @@ async def async_setup_entry( data = entry.runtime_data async_add_entities( - klass(vehicle) - for klass in ( - TessieWindowEntity, - TessieChargePortEntity, - TessieFrontTrunkEntity, - TessieRearTrunkEntity, + chain( + ( + klass(vehicle) + for klass in ( + TessieWindowEntity, + TessieChargePortEntity, + TessieFrontTrunkEntity, + TessieRearTrunkEntity, + ) + for vehicle in data.vehicles + ), + ( + TessieSunroofEntity(vehicle) + for vehicle in data.vehicles + if vehicle.data_coordinator.data.get( + "vehicle_config_sun_roof_installed" + ) + ), ) - for vehicle in data.vehicles ) @@ -161,3 +175,34 @@ class TessieRearTrunkEntity(TessieEntity, CoverEntity): if self._value == TessieCoverStates.OPEN: await self.run(open_close_rear_trunk) self.set((self.key, TessieCoverStates.CLOSED)) + + +class TessieSunroofEntity(TessieEntity, CoverEntity): + """Cover entity for the sunroof.""" + + _attr_device_class = CoverDeviceClass.WINDOW + _attr_supported_features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE + + def __init__(self, vehicle: TessieVehicleData) -> None: + """Initialize the cover.""" + super().__init__(vehicle, "vehicle_state_sun_roof_state") + + @property + def is_closed(self) -> bool | None: + """Return if the cover is closed or not.""" + return self._value == TessieCoverStates.CLOSED + + @property + def current_cover_position(self) -> int | None: + """Return the percentage open.""" + return self.get("vehicle_state_sun_roof_percent_open") + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open sunroof.""" + await self.run(vent_sunroof) + self.set((self.key, TessieCoverStates.OPEN)) + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close sunroof.""" + await self.run(close_sunroof) + self.set((self.key, TessieCoverStates.CLOSED)) diff --git a/homeassistant/components/tessie/manifest.json b/homeassistant/components/tessie/manifest.json index 493feeaa77e..81d10eb4ef7 100644 --- a/homeassistant/components/tessie/manifest.json +++ b/homeassistant/components/tessie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tessie", "iot_class": "cloud_polling", "loggers": ["tessie"], - "requirements": ["tessie-api==0.0.9", "tesla-fleet-api==0.6.2"] + "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.6.2"] } diff --git a/homeassistant/components/tessie/select.py b/homeassistant/components/tessie/select.py index 90e00084f15..1d02d07a741 100644 --- a/homeassistant/components/tessie/select.py +++ b/homeassistant/components/tessie/select.py @@ -5,14 +5,14 @@ from __future__ import annotations from itertools import chain from tesla_fleet_api.const import EnergyExportMode, EnergyOperationMode -from tessie_api import set_seat_heat +from tessie_api import set_seat_cool, set_seat_heat from homeassistant.components.select import SelectEntity from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import TessieConfigEntry -from .const import TessieSeatHeaterOptions +from .const import TessieSeatCoolerOptions, TessieSeatHeaterOptions from .entity import TessieEnergyEntity, TessieEntity from .helpers import handle_command from .models import TessieEnergyData @@ -27,6 +27,11 @@ SEAT_HEATERS = { "climate_state_seat_heater_third_row_right": "third_row_right", } +SEAT_COOLERS = { + "climate_state_seat_fan_front_left": "front_left", + "climate_state_seat_fan_front_right": "front_right", +} + async def async_setup_entry( hass: HomeAssistant, @@ -44,6 +49,13 @@ async def async_setup_entry( if key in vehicle.data_coordinator.data # not all vehicles have rear center or third row ), + ( + TessieSeatCoolerSelectEntity(vehicle, key) + for vehicle in entry.runtime_data.vehicles + for key in SEAT_COOLERS + if key + in vehicle.data_coordinator.data # not all vehicles have ventilated seats + ), ( TessieOperationSelectEntity(energysite) for energysite in entry.runtime_data.energysites @@ -81,6 +93,28 @@ class TessieSeatHeaterSelectEntity(TessieEntity, SelectEntity): self.set((self.key, level)) +class TessieSeatCoolerSelectEntity(TessieEntity, SelectEntity): + """Select entity for cooled seat.""" + + _attr_options = [ + TessieSeatCoolerOptions.OFF, + TessieSeatCoolerOptions.LOW, + TessieSeatCoolerOptions.MEDIUM, + TessieSeatCoolerOptions.HIGH, + ] + + @property + def current_option(self) -> str | None: + """Return the current selected option.""" + return self._attr_options[self._value] + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + level = self._attr_options.index(option) + await self.run(set_seat_cool, seat=SEAT_COOLERS[self.key], level=level) + self.set((self.key, level)) + + class TessieOperationSelectEntity(TessieEnergyEntity, SelectEntity): """Select entity for operation mode select entities.""" diff --git a/homeassistant/components/tessie/strings.json b/homeassistant/components/tessie/strings.json index 72f72558792..dd8ac39f4e5 100644 --- a/homeassistant/components/tessie/strings.json +++ b/homeassistant/components/tessie/strings.json @@ -235,6 +235,9 @@ }, "vehicle_state_rt": { "name": "Trunk" + }, + "vehicle_state_sun_roof_state": { + "name": "Sunroof" } }, "select": { @@ -301,6 +304,24 @@ "high": "[%key:component::tessie::entity::select::climate_state_seat_heater_left::state::high%]" } }, + "climate_state_seat_fan_front_left": { + "name": "Seat cooler left", + "state": { + "off": "[%key:common::state::off%]", + "low": "[%key:component::tessie::entity::select::climate_state_seat_heater_left::state::low%]", + "medium": "[%key:component::tessie::entity::select::climate_state_seat_heater_left::state::medium%]", + "high": "[%key:component::tessie::entity::select::climate_state_seat_heater_left::state::high%]" + } + }, + "climate_state_seat_fan_front_right": { + "name": "Seat cooler right", + "state": { + "off": "[%key:common::state::off%]", + "low": "[%key:component::tessie::entity::select::climate_state_seat_heater_left::state::low%]", + "medium": "[%key:component::tessie::entity::select::climate_state_seat_heater_left::state::medium%]", + "high": "[%key:component::tessie::entity::select::climate_state_seat_heater_left::state::high%]" + } + }, "components_customer_preferred_export_rule": { "name": "Allow export", "state": { diff --git a/homeassistant/components/todo/__init__.py b/homeassistant/components/todo/__init__.py index a515f0805e7..5febc9561c4 100644 --- a/homeassistant/components/todo/__init__.py +++ 
b/homeassistant/components/todo/__init__.py @@ -33,9 +33,13 @@ from .const import ( ATTR_DUE, ATTR_DUE_DATE, ATTR_DUE_DATETIME, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, DOMAIN, TodoItemStatus, TodoListEntityFeature, + TodoServices, ) _LOGGER = logging.getLogger(__name__) @@ -118,11 +122,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: websocket_api.async_register_command(hass, websocket_handle_todo_item_move) component.async_register_entity_service( - "add_item", + TodoServices.ADD_ITEM, vol.All( cv.make_entity_service_schema( { - vol.Required("item"): vol.All(cv.string, vol.Length(min=1)), + vol.Required(ATTR_ITEM): vol.All(cv.string, vol.Length(min=1)), **TODO_ITEM_FIELD_SCHEMA, } ), @@ -132,13 +136,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: required_features=[TodoListEntityFeature.CREATE_TODO_ITEM], ) component.async_register_entity_service( - "update_item", + TodoServices.UPDATE_ITEM, vol.All( cv.make_entity_service_schema( { - vol.Required("item"): vol.All(cv.string, vol.Length(min=1)), - vol.Optional("rename"): vol.All(cv.string, vol.Length(min=1)), - vol.Optional("status"): vol.In( + vol.Required(ATTR_ITEM): vol.All(cv.string, vol.Length(min=1)), + vol.Optional(ATTR_RENAME): vol.All(cv.string, vol.Length(min=1)), + vol.Optional(ATTR_STATUS): vol.In( {TodoItemStatus.NEEDS_ACTION, TodoItemStatus.COMPLETED}, ), **TODO_ITEM_FIELD_SCHEMA, @@ -146,27 +150,29 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ), *TODO_ITEM_FIELD_VALIDATIONS, cv.has_at_least_one_key( - "rename", "status", *[desc.service_field for desc in TODO_ITEM_FIELDS] + ATTR_RENAME, + ATTR_STATUS, + *[desc.service_field for desc in TODO_ITEM_FIELDS], ), ), _async_update_todo_item, required_features=[TodoListEntityFeature.UPDATE_TODO_ITEM], ) component.async_register_entity_service( - "remove_item", + TodoServices.REMOVE_ITEM, cv.make_entity_service_schema( { - vol.Required("item"): vol.All(cv.ensure_list, [cv.string]), + vol.Required(ATTR_ITEM): vol.All(cv.ensure_list, [cv.string]), } ), _async_remove_todo_items, required_features=[TodoListEntityFeature.DELETE_TODO_ITEM], ) component.async_register_entity_service( - "get_items", + TodoServices.GET_ITEMS, cv.make_entity_service_schema( { - vol.Optional("status"): vol.All( + vol.Optional(ATTR_STATUS): vol.All( cv.ensure_list, [vol.In({TodoItemStatus.NEEDS_ACTION, TodoItemStatus.COMPLETED})], ), @@ -176,7 +182,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: supports_response=SupportsResponse.ONLY, ) component.async_register_entity_service( - "remove_completed_items", + TodoServices.REMOVE_COMPLETED_ITEMS, {}, _async_remove_completed_items, required_features=[TodoListEntityFeature.DELETE_TODO_ITEM], diff --git a/homeassistant/components/todo/const.py b/homeassistant/components/todo/const.py index a605f9fcba2..ee7ef53715d 100644 --- a/homeassistant/components/todo/const.py +++ b/homeassistant/components/todo/const.py @@ -8,6 +8,19 @@ ATTR_DUE = "due" ATTR_DUE_DATE = "due_date" ATTR_DUE_DATETIME = "due_datetime" ATTR_DESCRIPTION = "description" +ATTR_ITEM = "item" +ATTR_RENAME = "rename" +ATTR_STATUS = "status" + + +class TodoServices(StrEnum): + """Services for the To-do integration.""" + + ADD_ITEM = "add_item" + UPDATE_ITEM = "update_item" + REMOVE_ITEM = "remove_item" + GET_ITEMS = "get_items" + REMOVE_COMPLETED_ITEMS = "remove_completed_items" class TodoListEntityFeature(IntFlag): diff --git a/homeassistant/components/tplink/light.py 
b/homeassistant/components/tplink/light.py index 22e7c523d1a..9b7dd499c97 100644 --- a/homeassistant/components/tplink/light.py +++ b/homeassistant/components/tplink/light.py @@ -392,11 +392,11 @@ class TPLinkLightEffectEntity(TPLinkLightEntity): kwargs[ATTR_EFFECT], brightness=brightness, transition=transition ) elif ATTR_COLOR_TEMP_KELVIN in kwargs: - if self.effect: + if self.effect and self.effect != EFFECT_OFF: # If there is an effect in progress # we have to clear the effect # before we can set a color temp - await self._light_module.set_hsv(0, 0, brightness) + await self._effect_module.set_effect(LightEffect.LIGHT_EFFECTS_OFF) await self._async_set_color_temp( kwargs[ATTR_COLOR_TEMP_KELVIN], brightness, transition ) diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index 3786a2565c2..a345f64e4b2 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -181,7 +181,7 @@ "macaddress": "1C61B4*" }, { - "hostname": "l5*", + "hostname": "l[59]*", "macaddress": "5CE931*" }, { @@ -189,9 +189,13 @@ "macaddress": "3C52A1*" }, { - "hostname": "l5*", + "hostname": "l[59]*", "macaddress": "5C628B*" }, + { + "hostname": "l[59]*", + "macaddress": "14EBB6*" + }, { "hostname": "tp*", "macaddress": "5C628B*" @@ -297,5 +301,5 @@ "iot_class": "local_polling", "loggers": ["kasa"], "quality_scale": "platinum", - "requirements": ["python-kasa[speedups]==0.7.0.3"] + "requirements": ["python-kasa[speedups]==0.7.0.4"] } diff --git a/homeassistant/components/unifi/config_flow.py b/homeassistant/components/unifi/config_flow.py index e93b59b0673..b5ad1ea2ff0 100644 --- a/homeassistant/components/unifi/config_flow.py +++ b/homeassistant/components/unifi/config_flow.py @@ -164,13 +164,12 @@ class UnifiFlowHandler(ConfigFlow, domain=UNIFI_DOMAIN): config_entry = self.reauth_config_entry abort_reason = "reauth_successful" - if ( - config_entry is not None - and config_entry.state is not ConfigEntryState.NOT_LOADED - ): - hub = config_entry.runtime_data - - if hub and hub.available: + if config_entry: + if ( + config_entry.state is ConfigEntryState.LOADED + and (hub := config_entry.runtime_data) + and hub.available + ): return self.async_abort(reason="already_configured") return self.async_update_reload_and_abort( diff --git a/homeassistant/components/unifiprotect/entity.py b/homeassistant/components/unifiprotect/entity.py index f29d18ce35b..17b9f7c4fe9 100644 --- a/homeassistant/components/unifiprotect/entity.py +++ b/homeassistant/components/unifiprotect/entity.py @@ -182,7 +182,6 @@ class BaseProtectEntity(Entity): self._async_get_ufp_enabled = description.get_ufp_enabled self._async_set_device_info() - self._async_update_device_from_protect(device) self._state_getters = tuple( partial(attrgetter(attr), self) for attr in self._state_attrs ) @@ -252,6 +251,7 @@ class BaseProtectEntity(Entity): self.async_on_remove( self.data.async_subscribe(self.device.mac, self._async_updated_event) ) + self._async_update_device_from_protect(self.device) class ProtectIsOnEntity(BaseProtectEntity): diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index b369b91264d..3f607ab1938 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==5.2.2", 
"unifi-discovery==1.2.0"], + "requirements": ["uiprotect==5.3.0", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/homeassistant/components/upb/__init__.py b/homeassistant/components/upb/__init__.py index f2db6ff1b3c..2e5a69393d4 100644 --- a/homeassistant/components/upb/__init__.py +++ b/homeassistant/components/upb/__init__.py @@ -26,7 +26,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b file = config_entry.data[CONF_FILE_PATH] upb = upb_lib.UpbPim({"url": url, "UPStartExportFile": file}) - upb.connect() + await upb.async_connect() hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][config_entry.entry_id] = {"upb": upb} diff --git a/homeassistant/components/upb/config_flow.py b/homeassistant/components/upb/config_flow.py index 40f49e57c60..fec93a51202 100644 --- a/homeassistant/components/upb/config_flow.py +++ b/homeassistant/components/upb/config_flow.py @@ -40,7 +40,7 @@ async def _validate_input(data): upb = upb_lib.UpbPim({"url": url, "UPStartExportFile": file_path}) - upb.connect(_connected_callback) + await upb.async_connect(_connected_callback) if not upb.config_ok: _LOGGER.error("Missing or invalid UPB file: %s", file_path) diff --git a/homeassistant/components/upb/manifest.json b/homeassistant/components/upb/manifest.json index b208edbc0e5..6b49c859771 100644 --- a/homeassistant/components/upb/manifest.json +++ b/homeassistant/components/upb/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/upb", "iot_class": "local_push", "loggers": ["upb_lib"], - "requirements": ["upb-lib==0.5.7"] + "requirements": ["upb-lib==0.5.8"] } diff --git a/homeassistant/components/websocket_api/strings.json b/homeassistant/components/websocket_api/strings.json index 10b95637b6b..afef732b8f5 100644 --- a/homeassistant/components/websocket_api/strings.json +++ b/homeassistant/components/websocket_api/strings.json @@ -1,7 +1,7 @@ { "exceptions": { "child_service_not_found": { - "message": "Service {domain}.{service} called service {child_domain}.{child_service} which was not found." + "message": "Action {domain}.{service} uses action {child_domain}.{child_service} which was not found." 
} } } diff --git a/homeassistant/components/xiaomi_miio/strings.json b/homeassistant/components/xiaomi_miio/strings.json index a9588855818..5037b2c3180 100644 --- a/homeassistant/components/xiaomi_miio/strings.json +++ b/homeassistant/components/xiaomi_miio/strings.json @@ -210,7 +210,7 @@ }, "remote_learn_command": { "name": "Remote learn command", - "description": "Learns an IR command, press \"Call Service\", point the remote at the IR device, and the learned command will be shown as a notification in Overview.", + "description": "Learns an IR command, press \"Perform action\", point the remote at the IR device, and the learned command will be shown as a notification in Overview.", "fields": { "slot": { "name": "Slot", diff --git a/homeassistant/components/zha/helpers.py b/homeassistant/components/zha/helpers.py index 4f60e8b32b2..0691e2429d1 100644 --- a/homeassistant/components/zha/helpers.py +++ b/homeassistant/components/zha/helpers.py @@ -73,7 +73,7 @@ from zha.exceptions import ZHAException from zha.mixins import LogMixin from zha.zigbee.cluster_handlers import ClusterBindEvent, ClusterConfigureReportingEvent from zha.zigbee.device import ClusterHandlerConfigurationComplete, Device, ZHAEvent -from zha.zigbee.group import Group, GroupMember +from zha.zigbee.group import Group, GroupInfo, GroupMember from zigpy.config import ( CONF_DATABASE, CONF_DEVICE, @@ -290,7 +290,11 @@ class ZHAGroupProxy(LogMixin): def log(self, level: int, msg: str, *args: Any, **kwargs) -> None: """Log a message.""" msg = f"[%s](%s): {msg}" - args = (f"0x{self.group.group_id:04x}", self.group.endpoint.id, *args) + args = ( + f"0x{self.group.group_id:04x}", + self.group.endpoint.endpoint_id, + *args, + ) _LOGGER.log(level, msg, *args, **kwargs) @@ -673,8 +677,8 @@ class ZHAGatewayProxy(EventBase): @callback def handle_group_removed(self, event: GroupEvent) -> None: """Handle a group removed event.""" - self._send_group_gateway_message(event.group_info, ZHA_GW_MSG_GROUP_REMOVED) zha_group_proxy = self.group_proxies.pop(event.group_info.group_id) + self._send_group_gateway_message(zha_group_proxy, ZHA_GW_MSG_GROUP_REMOVED) zha_group_proxy.info("group_removed") self._cleanup_group_entity_registry_entries(zha_group_proxy) @@ -760,12 +764,14 @@ class ZHAGatewayProxy(EventBase): zha_device_proxy.device_id = device_registry_device.id return zha_device_proxy - def _async_get_or_create_group_proxy(self, zha_group: Group) -> ZHAGroupProxy: + def _async_get_or_create_group_proxy(self, group_info: GroupInfo) -> ZHAGroupProxy: """Get or create a ZHA group.""" - zha_group_proxy = self.group_proxies.get(zha_group.group_id) + zha_group_proxy = self.group_proxies.get(group_info.group_id) if zha_group_proxy is None: - zha_group_proxy = ZHAGroupProxy(zha_group, self) - self.group_proxies[zha_group.group_id] = zha_group_proxy + zha_group_proxy = ZHAGroupProxy( + self.gateway.groups[group_info.group_id], self + ) + self.group_proxies[group_info.group_id] = zha_group_proxy return zha_group_proxy def _create_entity_metadata( @@ -840,19 +846,17 @@ class ZHAGatewayProxy(EventBase): async_dispatcher_send(self.hass, SIGNAL_ADD_ENTITIES) def _send_group_gateway_message( - self, zigpy_group: zigpy.group.Group, gateway_message_type: str + self, zha_group_proxy: ZHAGroupProxy, gateway_message_type: str ) -> None: """Send the gateway event for a zigpy group event.""" - zha_group = self.group_proxies.get(zigpy_group.group_id) - if zha_group is not None: - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: 
gateway_message_type, - ZHA_GW_MSG_GROUP_INFO: zha_group.group_info, - }, - ) + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: gateway_message_type, + ZHA_GW_MSG_GROUP_INFO: zha_group_proxy.group_info, + }, + ) async def _async_remove_device( self, device: ZHADeviceProxy, entity_refs: list[EntityReference] | None diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index b689e330727..6067fa897f5 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.20", "zha==0.0.19"], + "requirements": ["universal-silabs-flasher==0.0.21", "zha==0.0.23"], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/select.py b/homeassistant/components/zha/select.py index dfe9de24b40..fdb47b550fe 100644 --- a/homeassistant/components/zha/select.py +++ b/homeassistant/components/zha/select.py @@ -8,7 +8,7 @@ from typing import Any from homeassistant.components.select import SelectEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -69,7 +69,7 @@ class ZHAEnumSelectEntity(ZHAEntity, SelectEntity): @callback def restore_external_state_attributes(self, state: State) -> None: """Restore entity state.""" - if state.state and state.state != STATE_UNKNOWN: + if state.state and state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE): self.entity_data.entity.restore_external_state_attributes( state=state.state, ) diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index f25fdf1ebe4..5d81556564a 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -413,7 +413,7 @@ }, "warning_device_squawk": { "name": "Warning device squawk", - "description": "This service uses the WD capabilities to emit a quick audible/visible pulse called a \"squawk\". The squawk command has no effect if the WD is currently active (warning in progress).", + "description": "This action uses the WD capabilities to emit a quick audible/visible pulse called a \"squawk\". The squawk command has no effect if the WD is currently active (warning in progress).", "fields": { "ieee": { "name": "[%key:component::zha::services::permit::fields::ieee::name%]", @@ -435,7 +435,7 @@ }, "warning_device_warn": { "name": "Warning device starts alert", - "description": "This service starts the operation of the warning device. The warning device alerts the surrounding area by audible (siren) and visual (strobe) signals.", + "description": "This action starts the operation of the warning device. 
The warning device alerts the surrounding area by audible (siren) and visual (strobe) signals.", "fields": { "ieee": { "name": "[%key:component::zha::services::permit::fields::ieee::name%]", diff --git a/homeassistant/components/zha/websocket_api.py b/homeassistant/components/zha/websocket_api.py index 053a941de8d..97c625a27ed 100644 --- a/homeassistant/components/zha/websocket_api.py +++ b/homeassistant/components/zha/websocket_api.py @@ -47,7 +47,7 @@ from zha.application.helpers import ( ) from zha.zigbee.cluster_handlers.const import CLUSTER_HANDLER_IAS_WD from zha.zigbee.device import Device -from zha.zigbee.group import GroupMember +from zha.zigbee.group import GroupMemberReference import zigpy.backups from zigpy.config import CONF_DEVICE from zigpy.config.validators import cv_boolean @@ -259,9 +259,9 @@ class ClusterBinding(NamedTuple): endpoint_id: int -def _cv_group_member(value: dict[str, Any]) -> GroupMember: +def _cv_group_member(value: dict[str, Any]) -> GroupMemberReference: """Transform a group member.""" - return GroupMember( + return GroupMemberReference( ieee=value[ATTR_IEEE], endpoint_id=value[ATTR_ENDPOINT_ID], ) @@ -519,7 +519,7 @@ async def websocket_add_group( zha_gateway = get_zha_gateway_proxy(hass) group_name: str = msg[GROUP_NAME] group_id: int | None = msg.get(GROUP_ID) - members: list[GroupMember] | None = msg.get(ATTR_MEMBERS) + members: list[GroupMemberReference] | None = msg.get(ATTR_MEMBERS) group = await zha_gateway.gateway.async_create_zigpy_group( group_name, members, group_id ) @@ -570,8 +570,9 @@ async def websocket_add_group_members( ) -> None: """Add members to a ZHA group.""" zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) group_id: int = msg[GROUP_ID] - members: list[GroupMember] = msg[ATTR_MEMBERS] + members: list[GroupMemberReference] = msg[ATTR_MEMBERS] if not (zha_group := zha_gateway.groups.get(group_id)): connection.send_message( @@ -582,8 +583,9 @@ async def websocket_add_group_members( return await zha_group.async_add_members(members) - ret_group = zha_group.group_info - connection.send_result(msg[ID], ret_group) + ret_group = zha_gateway_proxy.get_group_proxy(group_id) + assert ret_group + connection.send_result(msg[ID], ret_group.group_info) @websocket_api.require_admin @@ -600,8 +602,9 @@ async def websocket_remove_group_members( ) -> None: """Remove members from a ZHA group.""" zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) group_id: int = msg[GROUP_ID] - members: list[GroupMember] = msg[ATTR_MEMBERS] + members: list[GroupMemberReference] = msg[ATTR_MEMBERS] if not (zha_group := zha_gateway.groups.get(group_id)): connection.send_message( @@ -612,8 +615,9 @@ async def websocket_remove_group_members( return await zha_group.async_remove_members(members) - ret_group = zha_group.group_info - connection.send_result(msg[ID], ret_group) + ret_group = zha_gateway_proxy.get_group_proxy(group_id) + assert ret_group + connection.send_result(msg[ID], ret_group.group_info) @websocket_api.require_admin diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index 7c65f1804b1..4bba3e0538c 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -291,7 +291,7 @@ "name": "Clear lock user code" }, "invoke_cc_api": { - "description": "Calls a Command Class API on a node. 
Some Command Classes can't be fully controlled via the `set_value` service and require direct calls to the Command Class API.", + "description": "Calls a Command Class API on a node. Some Command Classes can't be fully controlled via the `set_value` action and require direct calls to the Command Class API.", "fields": { "command_class": { "description": "The ID of the command class that you want to issue a command to.", @@ -313,7 +313,7 @@ "name": "Invoke a Command Class API on a node (advanced)" }, "multicast_set_value": { - "description": "Changes any value that Z-Wave JS recognizes on multiple Z-Wave devices using multicast, so all devices receive the message simultaneously. This service has minimal validation so only use this service if you know what you are doing.", + "description": "Changes any value that Z-Wave JS recognizes on multiple Z-Wave devices using multicast, so all devices receive the message simultaneously. This action has minimal validation so only use this action if you know what you are doing.", "fields": { "broadcast": { "description": "Whether command should be broadcast to all devices on the network.", @@ -475,7 +475,7 @@ "name": "Set lock user code" }, "set_value": { - "description": "Changes any value that Z-Wave JS recognizes on a Z-Wave device. This service has minimal validation so only use this service if you know what you are doing.", + "description": "Changes any value that Z-Wave JS recognizes on a Z-Wave device. This action has minimal validation so only use this action if you know what you are doing.", "fields": { "command_class": { "description": "The ID of the command class for the value.", @@ -502,7 +502,7 @@ "name": "[%key:component::zwave_js::services::set_config_parameter::fields::value::name%]" }, "wait_for_result": { - "description": "Whether or not to wait for a response from the node. If not included in the payload, the integration will decide whether to wait or not. If set to `true`, note that the service call can take a while if setting a value on an asleep battery device.", + "description": "Whether or not to wait for a response from the node. If not included in the payload, the integration will decide whether to wait or not. If set to `true`, note that the action can take a while if setting a value on an asleep battery device.", "name": "Wait for result?" 
} }, diff --git a/homeassistant/generated/bluetooth.py b/homeassistant/generated/bluetooth.py index 33bd7456fa6..b370c161cc0 100644 --- a/homeassistant/generated/bluetooth.py +++ b/homeassistant/generated/bluetooth.py @@ -137,6 +137,31 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "domain": "govee_ble", "local_name": "B5178*", }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5121*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5122*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5123*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5125*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5126*", + }, { "connectable": False, "domain": "govee_ble", @@ -221,6 +246,11 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "manufacturer_id": 19506, "service_uuid": "00001801-0000-1000-8000-00805f9b34fb", }, + { + "connectable": False, + "domain": "govee_ble", + "manufacturer_id": 61320, + }, { "domain": "homekit_controller", "manufacturer_data_start": [ diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 7fb57fa3f22..192a7dc37da 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -69,6 +69,7 @@ FLOWS = { "aurora", "aurora_abb_powerone", "aussie_broadband", + "autarco", "awair", "axis", "azure_data_explorer", @@ -273,6 +274,7 @@ FLOWS = { "ipp", "iqvia", "islamic_prayer_times", + "israel_rail", "iss", "ista_ecotrend", "isy994", @@ -477,6 +479,7 @@ FLOWS = { "rpi_power", "rtsp_to_webrtc", "ruckus_unleashed", + "russound_rio", "ruuvi_gateway", "ruuvitag_ble", "rympro", diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index e898f64d128..f6df799d01e 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -827,7 +827,7 @@ DHCP: Final[list[dict[str, str | bool]]] = [ }, { "domain": "tplink", - "hostname": "l5*", + "hostname": "l[59]*", "macaddress": "5CE931*", }, { @@ -837,9 +837,14 @@ DHCP: Final[list[dict[str, str | bool]]] = [ }, { "domain": "tplink", - "hostname": "l5*", + "hostname": "l[59]*", "macaddress": "5C628B*", }, + { + "domain": "tplink", + "hostname": "l[59]*", + "macaddress": "14EBB6*", + }, { "domain": "tplink", "hostname": "tp*", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 184514a7002..077b5ff2041 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -581,6 +581,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "autarco": { + "name": "Autarco", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "avion": { "name": "Avi-on", "integration_type": "hub", @@ -629,12 +635,6 @@ "config_flow": true, "iot_class": "local_push" }, - "bayesian": { - "name": "Bayesian", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "bbox": { "name": "Bbox", "integration_type": "hub", @@ -1840,12 +1840,6 @@ "config_flow": true, "iot_class": "local_polling" }, - "filter": { - "name": "Filter", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_push" - }, "fints": { "name": "FinTS", "integration_type": "service", @@ -2909,6 +2903,12 @@ "integration_type": "virtual", "supported_by": "motion_blinds" }, + "israel_rail": { + "name": "Israel Railways", + 
"integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "iss": { "name": "International Space Station (ISS)", "integration_type": "service", @@ -5164,7 +5164,7 @@ "integrations": { "russound_rio": { "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_push", "name": "Russound RIO" }, @@ -7156,6 +7156,12 @@ } }, "helper": { + "bayesian": { + "name": "Bayesian", + "integration_type": "helper", + "config_flow": false, + "iot_class": "local_polling" + }, "counter": { "integration_type": "helper", "config_flow": false @@ -7165,6 +7171,12 @@ "config_flow": true, "iot_class": "calculated" }, + "filter": { + "name": "Filter", + "integration_type": "helper", + "config_flow": false, + "iot_class": "local_push" + }, "generic_hygrostat": { "integration_type": "helper", "config_flow": true, diff --git a/homeassistant/helpers/intent.py b/homeassistant/helpers/intent.py index 1bf78ae3a29..eeb160934ff 100644 --- a/homeassistant/helpers/intent.py +++ b/homeassistant/helpers/intent.py @@ -54,6 +54,8 @@ INTENT_DECREASE_TIMER = "HassDecreaseTimer" INTENT_PAUSE_TIMER = "HassPauseTimer" INTENT_UNPAUSE_TIMER = "HassUnpauseTimer" INTENT_TIMER_STATUS = "HassTimerStatus" +INTENT_GET_CURRENT_DATE = "HassGetCurrentDate" +INTENT_GET_CURRENT_TIME = "HassGetCurrentTime" SLOT_SCHEMA = vol.Schema({}, extra=vol.ALLOW_EXTRA) diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index 506cadbf168..52d7271c196 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -277,6 +277,8 @@ class AssistAPI(API): intent.INTENT_GET_STATE, intent.INTENT_NEVERMIND, intent.INTENT_TOGGLE, + intent.INTENT_GET_CURRENT_DATE, + intent.INTENT_GET_CURRENT_TIME, } def __init__(self, hass: HomeAssistant) -> None: diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 9ab3f353dea..c21523baa38 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -2327,7 +2327,7 @@ def regex_match(value, find="", ignorecase=False): """Match value using regex.""" if not isinstance(value, str): value = str(value) - flags = re.I if ignorecase else 0 + flags = re.IGNORECASE if ignorecase else 0 return bool(_regex_cache(find, flags).match(value)) @@ -2338,7 +2338,7 @@ def regex_replace(value="", find="", replace="", ignorecase=False): """Replace using regex.""" if not isinstance(value, str): value = str(value) - flags = re.I if ignorecase else 0 + flags = re.IGNORECASE if ignorecase else 0 return _regex_cache(find, flags).sub(replace, value) @@ -2346,7 +2346,7 @@ def regex_search(value, find="", ignorecase=False): """Search using regex.""" if not isinstance(value, str): value = str(value) - flags = re.I if ignorecase else 0 + flags = re.IGNORECASE if ignorecase else 0 return bool(_regex_cache(find, flags).search(value)) @@ -2359,7 +2359,7 @@ def regex_findall(value, find="", ignorecase=False): """Find all matches using regex.""" if not isinstance(value, str): value = str(value) - flags = re.I if ignorecase else 0 + flags = re.IGNORECASE if ignorecase else 0 return _regex_cache(find, flags).findall(value) diff --git a/homeassistant/helpers/update_coordinator.py b/homeassistant/helpers/update_coordinator.py index c15dbb2d853..7cb1082d34d 100644 --- a/homeassistant/helpers/update_coordinator.py +++ b/homeassistant/helpers/update_coordinator.py @@ -6,6 +6,7 @@ from abc import abstractmethod import asyncio from collections.abc import Awaitable, Callable, Coroutine, Generator from datetime import 
datetime, timedelta +from functools import cached_property import logging from random import randint from time import monotonic @@ -471,7 +472,7 @@ class BaseCoordinatorEntity[ self.coordinator = coordinator self.coordinator_context = context - @property + @cached_property def should_poll(self) -> bool: """No need to poll. Coordinator notifies entity of updates.""" return False diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index cb153fc2a7c..2509737d87a 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -30,10 +30,10 @@ ha-av==10.1.1 ha-ffmpeg==3.2.0 habluetooth==3.1.3 hass-nabucasa==0.81.1 -hassil==1.7.3 +hassil==1.7.4 home-assistant-bluetooth==1.12.2 home-assistant-frontend==20240710.0 -home-assistant-intents==2024.7.3 +home-assistant-intents==2024.7.10 httpx==0.27.0 ifaddr==0.2.0 Jinja2==3.1.4 @@ -136,6 +136,9 @@ backoff>=2.0 # v2 has breaking changes (#99218). pydantic==1.10.17 +# Required for Python 3.12.4 compatibility (#119223). +mashumaro>=3.13.1 + # Breaks asyncio # https://github.com/pubnub/python/issues/130 pubnub!=6.4.0 diff --git a/homeassistant/util/color.py b/homeassistant/util/color.py index ab5c4037f9b..0745bc96dfb 100644 --- a/homeassistant/util/color.py +++ b/homeassistant/util/color.py @@ -244,7 +244,7 @@ def color_RGB_to_xy_brightness( y = Y / (X + Y + Z) # Brightness - Y = 1 if Y > 1 else Y + Y = min(Y, 1) brightness = round(Y * 255) # Check if the given xy value is within the color-reach of the lamp. diff --git a/mypy.ini b/mypy.ini index b24898b3287..cf16c4f5f63 100644 --- a/mypy.ini +++ b/mypy.ini @@ -732,6 +732,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.autarco.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.auth.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/pyproject.toml b/pyproject.toml index 5b41ca17a57..298f6a427da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -693,7 +693,7 @@ exclude_lines = [ ] [tool.ruff] -required-version = ">=0.5.0" +required-version = ">=0.5.2" [tool.ruff.lint] select = [ @@ -725,6 +725,7 @@ select = [ "E", # pycodestyle "F", # pyflakes/autoflake "FLY", # flynt + "FURB", # refurb "G", # flake8-logging-format "I", # isort "INP", # flake8-no-pep420 @@ -746,6 +747,7 @@ select = [ "RUF006", # Store a reference to the return value of asyncio.create_task "RUF010", # Use explicit conversion flag "RUF013", # PEP 484 prohibits implicit Optional + "RUF017", # Avoid quadratic list summation "RUF018", # Avoid assignment expressions in assert statements "RUF019", # Unnecessary key check before dictionary access # "RUF100", # Unused `noqa` directive; temporarily every now and then to clean them up diff --git a/requirements_all.txt b/requirements_all.txt index 6821a8b2869..70dc90eed6f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -12,11 +12,8 @@ AIOSomecomfort==0.0.25 # homeassistant.components.adax Adax-local==0.1.5 -# homeassistant.components.blinksticklight -BlinkStick==1.2.0 - # homeassistant.components.doorbird -DoorBirdPy==2.1.0 +DoorBirdPy==3.0.2 # homeassistant.components.homekit HAP-python==4.9.1 @@ -60,10 +57,7 @@ PyFlume==0.6.5 PyFronius==0.7.3 # homeassistant.components.pyload -PyLoadAPI==1.2.0 - -# 
homeassistant.components.mvglive -PyMVGLive==1.1.4 +PyLoadAPI==1.3.2 # homeassistant.components.met_eireann PyMetEireann==2021.8.0 @@ -90,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.48.0 +PySwitchbot==0.48.1 # homeassistant.components.switchmate PySwitchmate==0.5.1 @@ -182,10 +176,10 @@ aio-georss-gdacs==0.9 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.5.3 +aioairzone-cloud==0.5.4 # homeassistant.components.airzone -aioairzone==0.7.7 +aioairzone==0.8.0 # homeassistant.components.ambient_network # homeassistant.components.ambient_station @@ -195,7 +189,7 @@ aioambient==2024.01.0 aioapcaccess==0.4.2 # homeassistant.components.aquacell -aioaquacell==0.1.8 +aioaquacell==0.2.0 # homeassistant.components.aseko_pool_live aioaseko==0.1.1 @@ -213,7 +207,7 @@ aioazuredevops==2.1.1 aiobafi6==0.9.0 # homeassistant.components.aws -aiobotocore==2.13.0 +aiobotocore==2.13.1 # homeassistant.components.comelit aiocomelit==0.9.0 @@ -282,7 +276,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.4.15 # homeassistant.components.lifx -aiolifx==1.0.4 +aiolifx==1.0.5 # homeassistant.components.livisi aiolivisi==0.0.19 @@ -344,7 +338,7 @@ aiopyarr==23.4.0 aioqsw==0.3.5 # homeassistant.components.rainforest_raven -aioraven==0.6.0 +aioraven==0.7.0 # homeassistant.components.recollect_waste aiorecollect==2023.09.0 @@ -365,7 +359,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.0.0 +aioshelly==11.1.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -516,6 +510,9 @@ auroranoaa==0.0.3 # homeassistant.components.aurora_abb_powerone aurorapy==0.2.7 +# homeassistant.components.autarco +autarco==2.0.0 + # homeassistant.components.avea # avea==1.5.1 @@ -613,7 +610,10 @@ boschshcpy==0.2.91 # homeassistant.components.amazon_polly # homeassistant.components.route53 -boto3==1.34.51 +boto3==1.34.131 + +# homeassistant.components.aws +botocore==1.34.131 # homeassistant.components.bring bring-api==0.7.1 @@ -669,9 +669,6 @@ colorlog==6.8.2 # homeassistant.components.color_extractor colorthief==0.2.1 -# homeassistant.components.concord232 -concord232==0.15 - # homeassistant.components.upc_connect connect-box==0.3.1 @@ -746,9 +743,6 @@ discogs-client==2.3.0 # homeassistant.components.steamist discovery30303==0.3.2 -# homeassistant.components.dovado -dovado==0.4.1 - # homeassistant.components.dremel_3d_printer dremel3dpy==2.1.1 @@ -995,7 +989,7 @@ goslide-api==0.5.1 gotailwind==0.2.3 # homeassistant.components.govee_ble -govee-ble==0.31.3 +govee-ble==0.33.0 # homeassistant.components.govee_light_local govee-local-api==1.5.1 @@ -1059,7 +1053,7 @@ hass-nabucasa==0.81.1 hass-splunk==0.1.1 # homeassistant.components.conversation -hassil==1.7.3 +hassil==1.7.4 # homeassistant.components.jewish_calendar hdate==0.10.9 @@ -1096,7 +1090,7 @@ holidays==0.52 home-assistant-frontend==20240710.0 # homeassistant.components.conversation -home-assistant-intents==2024.7.3 +home-assistant-intents==2024.7.10 # homeassistant.components.home_connect homeconnect==0.7.2 @@ -1140,7 +1134,7 @@ ical==8.1.1 icmplib==3.0 # homeassistant.components.idasen_desk -idasen-ha==2.6.1 +idasen-ha==2.6.2 # homeassistant.components.network ifaddr==0.2.0 @@ -1181,6 +1175,9 @@ isal==1.6.1 # homeassistant.components.gogogate2 ismartgate==5.0.1 +# homeassistant.components.israel_rail +israel-rail-api==0.1.2 + # homeassistant.components.abode jaraco.abode==5.2.1 @@ -1207,7 +1204,7 @@ kegtron-ble==0.4.0 kiwiki-client==0.1.1 # 
homeassistant.components.knocki -knocki==0.2.0 +knocki==0.3.1 # homeassistant.components.knx knx-frontend==2024.1.20.105944 @@ -1496,7 +1493,7 @@ openhomedevice==2.2.0 opensensemap-api==0.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.4 +openwebifpy==4.2.5 # homeassistant.components.luci openwrt-luci-rpc==1.1.17 @@ -1505,7 +1502,7 @@ openwrt-luci-rpc==1.1.17 openwrt-ubus-rpc==0.0.2 # homeassistant.components.opower -opower==0.4.7 +opower==0.5.2 # homeassistant.components.oralb oralb-ble==0.17.6 @@ -2151,7 +2148,7 @@ pyschlage==2024.6.0 pysensibo==1.0.36 # homeassistant.components.serial -pyserial-asyncio-fast==0.11 +pyserial-asyncio-fast==0.13 # homeassistant.components.acer_projector # homeassistant.components.crownstone @@ -2214,7 +2211,7 @@ pyswitchbee==1.8.0 pytautulli==23.1.1 # homeassistant.components.tedee -pytedee-async==0.2.17 +pytedee-async==0.2.20 # homeassistant.components.tfiac pytfiac==0.4 @@ -2262,7 +2259,7 @@ python-gc100==1.0.3a0 python-gitlab==1.6.0 # homeassistant.components.analytics_insights -python-homeassistant-analytics==0.6.0 +python-homeassistant-analytics==0.7.0 # homeassistant.components.homewizard python-homewizard-energy==v6.0.0 @@ -2280,7 +2277,7 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.0.3 +python-kasa[speedups]==0.7.0.4 # homeassistant.components.lirc # python-lirc==1.2.3 @@ -2667,7 +2664,7 @@ stringcase==1.2.0 subarulink==0.7.11 # homeassistant.components.sunweg -sunweg==3.0.1 +sunweg==3.0.2 # homeassistant.components.surepetcare surepy==0.9.0 @@ -2725,7 +2722,7 @@ tesla-powerwall==0.5.2 tesla-wall-connector==1.0.2 # homeassistant.components.tessie -tessie-api==0.0.9 +tessie-api==0.1.1 # homeassistant.components.tensorflow # tf-models-official==2.5.0 @@ -2794,7 +2791,7 @@ twitchAPI==4.0.0 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==5.2.2 +uiprotect==5.3.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 @@ -2809,10 +2806,10 @@ unifi_ap==0.0.1 unifiled==0.11 # homeassistant.components.zha -universal-silabs-flasher==0.0.20 +universal-silabs-flasher==0.0.21 # homeassistant.components.upb -upb-lib==0.5.7 +upb-lib==0.5.8 # homeassistant.components.upcloud upcloud-api==2.5.1 @@ -2938,7 +2935,7 @@ yalesmartalarmclient==0.3.9 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==6.4.2 +yalexs==6.4.3 # homeassistant.components.yeelight yeelight==0.7.14 @@ -2971,7 +2968,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.19 +zha==0.0.23 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8d68a14551b..568528951d5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -13,7 +13,7 @@ AIOSomecomfort==0.0.25 Adax-local==0.1.5 # homeassistant.components.doorbird -DoorBirdPy==2.1.0 +DoorBirdPy==3.0.2 # homeassistant.components.homekit HAP-python==4.9.1 @@ -51,7 +51,7 @@ PyFlume==0.6.5 PyFronius==0.7.3 # homeassistant.components.pyload -PyLoadAPI==1.2.0 +PyLoadAPI==1.3.2 # homeassistant.components.met_eireann PyMetEireann==2021.8.0 @@ -78,7 +78,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.48.0 +PySwitchbot==0.48.1 # homeassistant.components.syncthru PySyncThru==0.7.10 @@ -161,10 +161,10 @@ aio-georss-gdacs==0.9 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.5.3 +aioairzone-cloud==0.5.4 # homeassistant.components.airzone 
-aioairzone==0.7.7 +aioairzone==0.8.0 # homeassistant.components.ambient_network # homeassistant.components.ambient_station @@ -174,7 +174,7 @@ aioambient==2024.01.0 aioapcaccess==0.4.2 # homeassistant.components.aquacell -aioaquacell==0.1.8 +aioaquacell==0.2.0 # homeassistant.components.aseko_pool_live aioaseko==0.1.1 @@ -192,7 +192,7 @@ aioazuredevops==2.1.1 aiobafi6==0.9.0 # homeassistant.components.aws -aiobotocore==2.13.0 +aiobotocore==2.13.1 # homeassistant.components.comelit aiocomelit==0.9.0 @@ -255,7 +255,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.4.15 # homeassistant.components.lifx -aiolifx==1.0.4 +aiolifx==1.0.5 # homeassistant.components.livisi aiolivisi==0.0.19 @@ -317,7 +317,7 @@ aiopyarr==23.4.0 aioqsw==0.3.5 # homeassistant.components.rainforest_raven -aioraven==0.6.0 +aioraven==0.7.0 # homeassistant.components.recollect_waste aiorecollect==2023.09.0 @@ -328,6 +328,9 @@ aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed aioruckus==0.34 +# homeassistant.components.russound_rio +aiorussound==1.1.2 + # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -335,7 +338,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.0.0 +aioshelly==11.1.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -459,6 +462,9 @@ auroranoaa==0.0.3 # homeassistant.components.aurora_abb_powerone aurorapy==0.2.7 +# homeassistant.components.autarco +autarco==2.0.0 + # homeassistant.components.axis axis==62 @@ -523,6 +529,9 @@ bond-async==0.2.1 # homeassistant.components.bosch_shc boschshcpy==0.2.91 +# homeassistant.components.aws +botocore==1.34.131 + # homeassistant.components.bring bring-api==0.7.1 @@ -818,7 +827,7 @@ googlemaps==2.5.1 gotailwind==0.2.3 # homeassistant.components.govee_ble -govee-ble==0.31.3 +govee-ble==0.33.0 # homeassistant.components.govee_light_local govee-local-api==1.5.1 @@ -870,7 +879,7 @@ habluetooth==3.1.3 hass-nabucasa==0.81.1 # homeassistant.components.conversation -hassil==1.7.3 +hassil==1.7.4 # homeassistant.components.jewish_calendar hdate==0.10.9 @@ -898,7 +907,7 @@ holidays==0.52 home-assistant-frontend==20240710.0 # homeassistant.components.conversation -home-assistant-intents==2024.7.3 +home-assistant-intents==2024.7.10 # homeassistant.components.home_connect homeconnect==0.7.2 @@ -933,7 +942,7 @@ ical==8.1.1 icmplib==3.0 # homeassistant.components.idasen_desk -idasen-ha==2.6.1 +idasen-ha==2.6.2 # homeassistant.components.network ifaddr==0.2.0 @@ -965,6 +974,9 @@ isal==1.6.1 # homeassistant.components.gogogate2 ismartgate==5.0.1 +# homeassistant.components.israel_rail +israel-rail-api==0.1.2 + # homeassistant.components.abode jaraco.abode==5.2.1 @@ -982,7 +994,7 @@ justnimbus==0.7.4 kegtron-ble==0.4.0 # homeassistant.components.knocki -knocki==0.2.0 +knocki==0.3.1 # homeassistant.components.knx knx-frontend==2024.1.20.105944 @@ -1208,10 +1220,10 @@ openerz-api==0.3.0 openhomedevice==2.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.4 +openwebifpy==4.2.5 # homeassistant.components.opower -opower==0.4.7 +opower==0.5.2 # homeassistant.components.oralb oralb-ble==0.17.6 @@ -1741,7 +1753,7 @@ pyswitchbee==1.8.0 pytautulli==23.1.1 # homeassistant.components.tedee -pytedee-async==0.2.17 +pytedee-async==0.2.20 # homeassistant.components.motionmount python-MotionMount==2.0.0 @@ -1762,7 +1774,7 @@ python-fullykiosk==0.0.14 # python-gammu==3.2.4 # homeassistant.components.analytics_insights -python-homeassistant-analytics==0.6.0 +python-homeassistant-analytics==0.7.0 # 
homeassistant.components.homewizard python-homewizard-energy==v6.0.0 @@ -1774,7 +1786,7 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.0.3 +python-kasa[speedups]==0.7.0.4 # homeassistant.components.matter python-matter-server==6.2.2 @@ -2083,7 +2095,7 @@ stringcase==1.2.0 subarulink==0.7.11 # homeassistant.components.sunweg -sunweg==3.0.1 +sunweg==3.0.2 # homeassistant.components.surepetcare surepy==0.9.0 @@ -2120,7 +2132,7 @@ tesla-powerwall==0.5.2 tesla-wall-connector==1.0.2 # homeassistant.components.tessie -tessie-api==0.0.9 +tessie-api==0.1.1 # homeassistant.components.thermobeacon thermobeacon-ble==0.7.0 @@ -2171,7 +2183,7 @@ twitchAPI==4.0.0 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==5.2.2 +uiprotect==5.3.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 @@ -2180,10 +2192,10 @@ ultraheat-api==0.5.7 unifi-discovery==1.2.0 # homeassistant.components.zha -universal-silabs-flasher==0.0.20 +universal-silabs-flasher==0.0.21 # homeassistant.components.upb -upb-lib==0.5.7 +upb-lib==0.5.8 # homeassistant.components.upcloud upcloud-api==2.5.1 @@ -2294,7 +2306,7 @@ yalesmartalarmclient==0.3.9 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==6.4.2 +yalexs==6.4.3 # homeassistant.components.yeelight yeelight==0.7.14 @@ -2321,7 +2333,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.19 +zha==0.0.23 # homeassistant.components.zwave_js zwave-js-server-python==0.57.0 diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index d4cbbbda867..80add9a50ee 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.5.1 +ruff==0.5.2 yamllint==1.35.1 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 434b4d0071f..3c593a2bdf7 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -157,6 +157,9 @@ backoff>=2.0 # v2 has breaking changes (#99218). pydantic==1.10.17 +# Required for Python 3.12.4 compatibility (#119223). 
+mashumaro>=3.13.1 + # Breaks asyncio # https://github.com/pubnub/python/issues/130 pubnub!=6.4.0 diff --git a/script/hassfest/config_schema.py b/script/hassfest/config_schema.py index 141b087472b..4d3f0cde482 100644 --- a/script/hassfest/config_schema.py +++ b/script/hassfest/config_schema.py @@ -21,7 +21,7 @@ def _has_assignment(module: ast.Module, name: str) -> bool: for item in module.body: if type(item) not in (ast.Assign, ast.AnnAssign, ast.AugAssign): continue - if type(item) == ast.Assign: + if type(item) is ast.Assign: for target in item.targets: if getattr(target, "id", None) == name: return True @@ -35,7 +35,7 @@ def _has_function( module: ast.Module, _type: ast.AsyncFunctionDef | ast.FunctionDef, name: str ) -> bool: """Test if the module defines a function.""" - return any(type(item) == _type and item.name == name for item in module.body) + return any(type(item) is _type and item.name == name for item in module.body) def _has_import(module: ast.Module, name: str) -> bool: diff --git a/script/licenses.py b/script/licenses.py index b560d709d33..4e5539c46b1 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -134,6 +134,7 @@ EXCEPTIONS = { "apple_weatherkit", # https://github.com/tjhorner/python-weatherkit/pull/3 "asyncio", # PSF License "chacha20poly1305", # LGPL + "chacha20poly1305-reuseable", # Apache 2.0 or BSD 3-Clause "commentjson", # https://github.com/vaidik/commentjson/pull/55 "crownstone-cloud", # https://github.com/crownstone/crownstone-lib-python-cloud/pull/5 "crownstone-core", # https://github.com/crownstone/crownstone-lib-python-core/pull/6 @@ -156,6 +157,7 @@ EXCEPTIONS = { "nsw-fuel-api-client", # https://github.com/nickw444/nsw-fuel-api-client/pull/14 "pigpio", # https://github.com/joan2937/pigpio/pull/608 "pyEmby", # https://github.com/mezz64/pyEmby/pull/12 + "pymitv", # MIT "pyTibber", # https://github.com/Danielhiversen/pyTibber/pull/294 "pybbox", # https://github.com/HydrelioxGitHub/pybbox/pull/5 "pyeconet", # https://github.com/w1ll1am23/pyeconet/pull/41 @@ -178,36 +180,19 @@ EXCEPTIONS = { } TODO = { - "BlinkStick": AwesomeVersion( - "1.2.0" - ), # Proprietary license https://github.com/arvydas/blinkstick-python - "PyMVGLive": AwesomeVersion( - "1.1.4" - ), # No license and archived https://github.com/pc-coholic/PyMVGLive "aiocache": AwesomeVersion( "0.12.2" ), # https://github.com/aio-libs/aiocache/blob/master/LICENSE all rights reserved? 
"asterisk_mbox": AwesomeVersion( "0.5.0" ), # No license, integration is deprecated and scheduled for removal in 2024.9.0 - "chacha20poly1305-reuseable": AwesomeVersion("0.12.1"), # has 2 licenses - "concord232": AwesomeVersion( - "0.15" - ), # No license https://github.com/JasonCarter80/concord232/issues/19 - "dovado": AwesomeVersion( - "0.4.1" - ), # No license https://github.com/molobrakos/dovado/issues/4 "mficlient": AwesomeVersion( "0.3.0" ), # No license https://github.com/kk7ds/mficlient/issues/4 - "pubnub": AwesomeVersion( - "8.0.0" - ), # Proprietary license https://github.com/pubnub/python/blob/master/LICENSE "pyElectra": AwesomeVersion( "1.2.3" ), # No License https://github.com/jafar-atili/pyElectra/issues/3 "pyflic": AwesomeVersion("2.0.3"), # No OSI approved license CC0-1.0 Universal) - "pymitv": AwesomeVersion("1.4.3"), # Not sure why pip-licenses doesn't pick this up "refoss_ha": AwesomeVersion( "1.2.1" ), # No License https://github.com/ashionky/refoss_ha/issues/4 @@ -237,20 +222,20 @@ def main() -> int: f"{package.name}@{package.version}: {package.license}" ) print("Please remove the package from the TODO list.") - print("") + print() else: print( "We could not detect an OSI-approved license for " f"{package.name}@{package.version}: {package.license}" ) - print("") + print() exit_code = 1 elif not approved and package.name not in EXCEPTIONS: print( "We could not detect an OSI-approved license for" f"{package.name}@{package.version}: {package.license}" ) - print("") + print() exit_code = 1 elif approved and package.name in EXCEPTIONS: print( @@ -258,7 +243,7 @@ def main() -> int: f"{package.name}@{package.version}: {package.license}" ) print(f"Please remove the package from the EXCEPTIONS list: {package.name}") - print("") + print() exit_code = 1 current_packages = {package.name for package in package_definitions} for package in [*TODO.keys(), *EXCEPTIONS]: @@ -267,7 +252,7 @@ def main() -> int: f"Package {package} is tracked, but not used. Please remove from the licenses.py" "file." 
) - print("") + print() exit_code = 1 return exit_code diff --git a/tests/components/airzone/snapshots/test_diagnostics.ambr b/tests/components/airzone/snapshots/test_diagnostics.ambr index adf0176765c..2adf50558e0 100644 --- a/tests/components/airzone/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone/snapshots/test_diagnostics.ambr @@ -267,10 +267,6 @@ 'temp-set': 45, 'temp-unit': 0, }), - 'new-systems': list([ - ]), - 'new-zones': list([ - ]), 'num-systems': 3, 'num-zones': 7, 'systems': dict({ diff --git a/tests/components/airzone/test_coordinator.py b/tests/components/airzone/test_coordinator.py index 06c77bebb81..583758a6bee 100644 --- a/tests/components/airzone/test_coordinator.py +++ b/tests/components/airzone/test_coordinator.py @@ -8,6 +8,7 @@ from aioairzone.exceptions import ( InvalidMethod, SystemOutOfRange, ) +from freezegun.api import FrozenDateTimeFactory from homeassistant.components.airzone.const import DOMAIN from homeassistant.components.airzone.coordinator import SCAN_INTERVAL @@ -15,7 +16,7 @@ from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.util.dt import utcnow -from .util import CONFIG, HVAC_MOCK, HVAC_VERSION_MOCK +from .util import CONFIG, HVAC_MOCK, HVAC_MOCK_NEW_ZONES, HVAC_VERSION_MOCK from tests.common import MockConfigEntry, async_fire_time_changed @@ -64,3 +65,62 @@ async def test_coordinator_client_connector_error(hass: HomeAssistant) -> None: state = hass.states.get("sensor.despacho_temperature") assert state.state == STATE_UNAVAILABLE + + +async def test_coordinator_new_devices( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test new devices on coordinator update.""" + + config_entry = MockConfigEntry( + data=CONFIG, + domain=DOMAIN, + unique_id="airzone_unique_id", + ) + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_dhw", + side_effect=HotWaterNotAvailable, + ), + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_hvac", + return_value=HVAC_MOCK_NEW_ZONES, + ) as mock_hvac, + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_hvac_systems", + side_effect=SystemOutOfRange, + ), + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_version", + return_value=HVAC_VERSION_MOCK, + ), + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_webserver", + side_effect=InvalidMethod, + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + mock_hvac.assert_called_once() + mock_hvac.reset_mock() + + state = hass.states.get("sensor.salon_temperature") + assert state.state == "19.6" + + state = hass.states.get("sensor.dorm_ppal_temperature") + assert state is None + + mock_hvac.return_value = HVAC_MOCK + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + mock_hvac.assert_called_once() + + state = hass.states.get("sensor.salon_temperature") + assert state.state == "19.6" + + state = hass.states.get("sensor.dorm_ppal_temperature") + assert state.state == "21.1" diff --git a/tests/components/airzone/util.py b/tests/components/airzone/util.py index 6e3e0eccc8f..2cdb7a9c6f9 100644 --- a/tests/components/airzone/util.py +++ b/tests/components/airzone/util.py @@ -1,5 +1,6 @@ """Tests for the Airzone integration.""" +from copy import deepcopy from unittest.mock import patch from aioairzone.const import ( @@ -274,6 +275,16 @@ HVAC_MOCK = { ] } +HVAC_MOCK_NEW_ZONES = { + 
API_SYSTEMS: [ + { + API_DATA: [ + deepcopy(HVAC_MOCK[API_SYSTEMS][0][API_DATA][0]), + ] + } + ] +} + HVAC_DHW_MOCK = { API_DATA: { API_SYSTEM_ID: 0, diff --git a/tests/components/alexa/test_smart_home.py b/tests/components/alexa/test_smart_home.py index d502dce7d01..fb27c91eea7 100644 --- a/tests/components/alexa/test_smart_home.py +++ b/tests/components/alexa/test_smart_home.py @@ -1979,7 +1979,7 @@ async def test_cover_position( "friendly_name": "Test cover range", "device_class": "blind", "supported_features": supported_features, - "position": position, + "current_position": position, }, ) appliance = await discovery_test(device, hass) @@ -2296,7 +2296,7 @@ async def test_cover_position_range( "friendly_name": "Test cover range", "device_class": "blind", "supported_features": 7, - "position": 30, + "current_position": 30, }, ) appliance = await discovery_test(device, hass) @@ -4658,7 +4658,7 @@ async def test_cover_semantics_position_and_tilt(hass: HomeAssistant) -> None: "friendly_name": "Test cover semantics", "device_class": "blind", "supported_features": 255, - "position": 30, + "current_position": 30, "tilt_position": 30, }, ) diff --git a/tests/components/autarco/__init__.py b/tests/components/autarco/__init__.py new file mode 100644 index 00000000000..208e5999fc7 --- /dev/null +++ b/tests/components/autarco/__init__.py @@ -0,0 +1,12 @@ +"""Tests for the Autarco integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Set up the integration for testing.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/autarco/conftest.py b/tests/components/autarco/conftest.py new file mode 100644 index 00000000000..c7a95d7aa23 --- /dev/null +++ b/tests/components/autarco/conftest.py @@ -0,0 +1,82 @@ +"""Common fixtures for the Autarco tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from autarco import AccountSite, Inverter, Solar +import pytest + +from homeassistant.components.autarco.const import DOMAIN +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.autarco.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_autarco_client() -> Generator[AsyncMock]: + """Mock an Autarco client.""" + with ( + patch( + "homeassistant.components.autarco.Autarco", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.autarco.config_flow.Autarco", + new=mock_client, + ), + ): + client = mock_client.return_value + client.get_account.return_value = [ + AccountSite( + site_id=1, + public_key="key-public", + system_name="test-system", + retailer="test-retailer", + health="OK", + ) + ] + client.get_solar.return_value = Solar( + power_production=200, + energy_production_today=4, + energy_production_month=58, + energy_production_total=10379, + ) + client.get_inverters.return_value = { + "test-serial-1": Inverter( + serial_number="test-serial-1", + out_ac_power=200, + out_ac_energy_total=10379, + grid_turned_off=False, + health="OK", + ), + "test-serial-2": Inverter( + serial_number="test-serial-2", + out_ac_power=500, + out_ac_energy_total=10379,
grid_turned_off=False, + health="OK", + ), + } + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Autarco", + data={ + CONF_EMAIL: "test@autarco.com", + CONF_PASSWORD: "test-password", + }, + ) diff --git a/tests/components/autarco/snapshots/test_diagnostics.ambr b/tests/components/autarco/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..53d9f96fb86 --- /dev/null +++ b/tests/components/autarco/snapshots/test_diagnostics.ambr @@ -0,0 +1,34 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'sites_data': list([ + dict({ + 'health': 'OK', + 'id': 1, + 'inverters': list([ + dict({ + 'grid_turned_off': False, + 'health': 'OK', + 'out_ac_energy_total': 10379, + 'out_ac_power': 200, + 'serial_number': 'test-serial-1', + }), + dict({ + 'grid_turned_off': False, + 'health': 'OK', + 'out_ac_energy_total': 10379, + 'out_ac_power': 500, + 'serial_number': 'test-serial-2', + }), + ]), + 'name': 'test-system', + 'solar': dict({ + 'energy_production_month': 58, + 'energy_production_today': 4, + 'energy_production_total': 10379, + 'power_production': 200, + }), + }), + ]), + }) +# --- diff --git a/tests/components/autarco/snapshots/test_sensor.ambr b/tests/components/autarco/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..2ff0236a59f --- /dev/null +++ b/tests/components/autarco/snapshots/test_sensor.ambr @@ -0,0 +1,805 @@ +# serializer version: 1 +# name: test_all_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy AC output total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_energy_total', + 'unique_id': 'test-serial-1_out_ac_energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter test-serial-1 Energy AC output total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_1_power_ac_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power AC output', + 'platform': 'autarco', + 'previous_unique_id': None, 
+ 'supported_features': 0, + 'translation_key': 'out_ac_power', + 'unique_id': 'test-serial-1_out_ac_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_1_power_ac_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter test-serial-1 Power AC output', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '200', + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy AC output total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_energy_total', + 'unique_id': 'test-serial-2_out_ac_energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter test-serial-2 Energy AC output total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_2_power_ac_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power AC output', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_power', + 'unique_id': 'test-serial-2_out_ac_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_2_power_ac_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter test-serial-2 Power AC output', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '500', + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.solar_energy_production_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production month', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_production_month', + 'unique_id': '1_solar_energy_production_month', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_month-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar Energy production month', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_month', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '58', + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production today', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_production_today', + 'unique_id': '1_solar_energy_production_today', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar Energy production today', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_energy_production_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_production_total', + 'unique_id': '1_solar_energy_production_total', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar Energy production total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_all_sensors[sensor.solar_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power production', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_production', + 'unique_id': '1_solar_power_production', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.solar_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Solar Power production', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_power_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '200', + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy AC output total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_energy_total', + 'unique_id': 'test-serial-1_out_ac_energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter test-serial-1 Energy AC output total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_1_power_ac_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power AC output', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_power', + 'unique_id': 'test-serial-1_out_ac_power', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_1_power_ac_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter test-serial-1 Power AC output', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '200', + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy AC output total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_energy_total', + 'unique_id': 'test-serial-2_out_ac_energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter test-serial-2 Energy AC output total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_2_power_ac_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power AC output', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_power', + 'unique_id': 'test-serial-2_out_ac_power', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_2_power_ac_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter test-serial-2 Power AC output', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '500', + }) +# --- +# name: test_solar_sensors[sensor.solar_energy_production_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_energy_production_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production month', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_production_month', + 'unique_id': '1_solar_energy_production_month', + 'unit_of_measurement': , + }) +# --- +# name: 
test_solar_sensors[sensor.solar_energy_production_month-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar Energy production month', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_month', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '58', + }) +# --- +# name: test_solar_sensors[sensor.solar_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production today', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_production_today', + 'unique_id': '1_solar_energy_production_today', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.solar_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar Energy production today', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_solar_sensors[sensor.solar_energy_production_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_energy_production_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_production_total', + 'unique_id': '1_solar_energy_production_total', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.solar_energy_production_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar Energy production total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_solar_sensors[sensor.solar_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power production', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, 
+ 'translation_key': 'power_production', + 'unique_id': '1_solar_power_production', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.solar_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Solar Power production', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_power_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '200', + }) +# --- diff --git a/tests/components/autarco/test_config_flow.py b/tests/components/autarco/test_config_flow.py new file mode 100644 index 00000000000..621ad7f55c8 --- /dev/null +++ b/tests/components/autarco/test_config_flow.py @@ -0,0 +1,101 @@ +"""Test the Autarco config flow.""" + +from unittest.mock import AsyncMock + +from autarco import AutarcoAuthenticationError, AutarcoConnectionError +import pytest + +from homeassistant.components.autarco.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_user_flow( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "user" + assert not result.get("errors") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result.get("title") == "test@autarco.com" + assert result.get("data") == { + CONF_EMAIL: "test@autarco.com", + CONF_PASSWORD: "test-password", + } + assert len(mock_autarco_client.get_account.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_duplicate_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_autarco_client: AsyncMock, +) -> None: + """Test abort when setting up duplicate entry.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result.get("type") is FlowResultType.FORM + assert not result.get("errors") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "already_configured" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (AutarcoConnectionError, "cannot_connect"), + (AutarcoAuthenticationError, "invalid_auth"), + ], +) +async def test_exceptions( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test exceptions.""" + mock_autarco_client.get_account.side_effect = exception + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, + ) + assert result.get("type") is 
FlowResultType.FORM + assert result.get("errors") == {"base": error} + + mock_autarco_client.get_account.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, + ) + assert result.get("type") is FlowResultType.CREATE_ENTRY diff --git a/tests/components/autarco/test_diagnostics.py b/tests/components/autarco/test_diagnostics.py new file mode 100644 index 00000000000..1d12a2c1894 --- /dev/null +++ b/tests/components/autarco/test_diagnostics.py @@ -0,0 +1,30 @@ +"""Test Autarco diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_autarco_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, mock_config_entry) + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + + assert result == snapshot diff --git a/tests/components/autarco/test_init.py b/tests/components/autarco/test_init.py new file mode 100644 index 00000000000..81c5f947251 --- /dev/null +++ b/tests/components/autarco/test_init.py @@ -0,0 +1,28 @@ +"""Test the Autarco init module.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_entry( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test load and unload entry.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_remove(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/autarco/test_sensor.py b/tests/components/autarco/test_sensor.py new file mode 100644 index 00000000000..e5e823501b9 --- /dev/null +++ b/tests/components/autarco/test_sensor.py @@ -0,0 +1,27 @@ +"""Test the sensor provided by the Autarco integration.""" + +from unittest.mock import MagicMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_sensors( + hass: HomeAssistant, + mock_autarco_client: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Autarco sensors.""" + with patch("homeassistant.components.autarco.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr index bee9b70c490..8a26acd1040 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr @@ -406,7 +406,7 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': <SensorDeviceClass.VOLUME: 'volume'>, + 'original_device_class': <SensorDeviceClass.VOLUME_STORAGE: 'volume_storage'>, 'original_icon': None, 'original_name': 'Remaining fuel', 'platform': 'bmw_connected_drive', @@ -420,7 +420,7 @@ # name: test_entity_state_attrs[sensor.i3_rex_remaining_fuel-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'volume', + 'device_class': 'volume_storage', 'friendly_name': 'i3 (+ REX) Remaining fuel', 'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>, 'unit_of_measurement': <UnitOfVolume.LITERS: 'L'>, @@ -3253,7 +3253,7 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': <SensorDeviceClass.VOLUME: 'volume'>, + 'original_device_class': <SensorDeviceClass.VOLUME_STORAGE: 'volume_storage'>, 'original_icon': None, 'original_name': 'Remaining fuel', 'platform': 'bmw_connected_drive', @@ -3267,7 +3267,7 @@ # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_fuel-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'volume', + 'device_class': 'volume_storage', 'friendly_name': 'M340i xDrive Remaining fuel', 'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>, 'unit_of_measurement': <UnitOfVolume.LITERS: 'L'>, diff --git a/tests/components/broadlink/__init__.py b/tests/components/broadlink/__init__.py index 1c87de8d9e2..61ef27815fd 100644 --- a/tests/components/broadlink/__init__.py +++ b/tests/components/broadlink/__init__.py @@ -89,6 +89,16 @@ BROADLINK_DEVICES = { 57, 5, ), + "Guest room": ( + "192.168.0.66", + "34ea34b61d2e", + "HY02/HY03", + "Hysen", + "HYS", + 0x4EAD, + 10024, + 5, + ), } diff --git a/tests/components/broadlink/test_select.py b/tests/components/broadlink/test_select.py new file mode 100644 index 00000000000..42715c9a5ab --- /dev/null +++ b/tests/components/broadlink/test_select.py @@ -0,0 +1,67 @@ +"""Tests for Broadlink select.""" + +from homeassistant.components.broadlink.const import DOMAIN +from homeassistant.components.select import ( + ATTR_OPTION, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.entity_component import async_update_entity + +from . 
import get_device + + +async def test_select( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test Broadlink select.""" + await hass.config.async_set_time_zone("UTC") + + device = get_device("Guest room") + mock_setup = await device.setup_entry(hass) + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_setup.entry.unique_id)} + ) + entries = er.async_entries_for_device(entity_registry, device_entry.id) + selects = [entry for entry in entries if entry.domain == Platform.SELECT] + assert len(selects) == 1 + + select = selects[0] + + mock_setup.api.get_full_status.return_value = { + "dayofweek": 3, + "hour": 2, + "min": 3, + "sec": 4, + } + await async_update_entity(hass, select.entity_id) + assert mock_setup.api.get_full_status.call_count == 2 + state = hass.states.get(select.entity_id) + assert state.state == "wednesday" + + # set value + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: select.entity_id, + ATTR_OPTION: "tuesday", + }, + blocking=True, + ) + state = hass.states.get(select.entity_id) + assert state.state == "tuesday" + assert mock_setup.api.set_time.call_count == 1 + call_args = mock_setup.api.set_time.call_args.kwargs + assert call_args == { + "hour": 2, + "minute": 3, + "second": 4, + "day": 2, + } diff --git a/tests/components/broadlink/test_time.py b/tests/components/broadlink/test_time.py new file mode 100644 index 00000000000..819954158bb --- /dev/null +++ b/tests/components/broadlink/test_time.py @@ -0,0 +1,67 @@ +"""Tests for Broadlink time.""" + +from homeassistant.components.broadlink.const import DOMAIN +from homeassistant.components.time import ( + ATTR_TIME, + DOMAIN as TIME_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.entity_component import async_update_entity + +from . 
import get_device + + +async def test_time( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test Broadlink time.""" + await hass.config.async_set_time_zone("UTC") + + device = get_device("Guest room") + mock_setup = await device.setup_entry(hass) + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_setup.entry.unique_id)} + ) + entries = er.async_entries_for_device(entity_registry, device_entry.id) + times = [entry for entry in entries if entry.domain == Platform.TIME] + assert len(times) == 1 + + time = times[0] + + mock_setup.api.get_full_status.return_value = { + "dayofweek": 3, + "hour": 2, + "min": 3, + "sec": 4, + } + await async_update_entity(hass, time.entity_id) + assert mock_setup.api.get_full_status.call_count == 2 + state = hass.states.get(time.entity_id) + assert state.state == "02:03:04+00:00" + + # set value + await hass.services.async_call( + TIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: time.entity_id, + ATTR_TIME: "03:04:05", + }, + blocking=True, + ) + state = hass.states.get(time.entity_id) + assert state.state == "03:04:05" + assert mock_setup.api.set_time.call_count == 1 + call_args = mock_setup.api.set_time.call_args.kwargs + assert call_args == { + "hour": 3, + "minute": 4, + "second": 5, + "day": 3, + } diff --git a/tests/components/caldav/test_todo.py b/tests/components/caldav/test_todo.py index 66f6e975453..69a49e0fcbe 100644 --- a/tests/components/caldav/test_todo.py +++ b/tests/components/caldav/test_todo.py @@ -8,8 +8,17 @@ from caldav.lib.error import DAVError, NotFoundError from caldav.objects import Todo import pytest -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN -from homeassistant.const import Platform +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_DUE_DATETIME, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -226,12 +235,12 @@ async def test_supported_components( RESULT_ITEM, ), ( - {"due_date": "2023-11-18"}, + {ATTR_DUE_DATE: "2023-11-18"}, {"status": "NEEDS-ACTION", "summary": "Cheese", "due": date(2023, 11, 18)}, {**RESULT_ITEM, "due": "2023-11-18"}, ), ( - {"due_datetime": "2023-11-18T08:30:00-06:00"}, + {ATTR_DUE_DATETIME: "2023-11-18T08:30:00-06:00"}, { "status": "NEEDS-ACTION", "summary": "Cheese", @@ -240,7 +249,7 @@ async def test_supported_components( {**RESULT_ITEM, "due": "2023-11-18T08:30:00-06:00"}, ), ( - {"description": "Make sure to get Swiss"}, + {ATTR_DESCRIPTION: "Make sure to get Swiss"}, { "status": "NEEDS-ACTION", "summary": "Cheese", @@ -278,9 +287,9 @@ async def test_add_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Cheese", **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Cheese", **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -306,9 +315,9 @@ async def test_add_item_failure( with pytest.raises(HomeAssistantError, match="CalDAV save error"): await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Cheese"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Cheese"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -317,7 +326,7 @@ async def test_add_item_failure( ("update_data", "expected_ics", "expected_state", 
"expected_item"), [ ( - {"rename": "Swiss Cheese"}, + {ATTR_RENAME: "Swiss Cheese"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -334,7 +343,7 @@ async def test_add_item_failure( }, ), ( - {"status": "needs_action"}, + {ATTR_STATUS: "needs_action"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -351,7 +360,7 @@ async def test_add_item_failure( }, ), ( - {"status": "completed"}, + {ATTR_STATUS: "completed"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -368,7 +377,7 @@ async def test_add_item_failure( }, ), ( - {"rename": "Swiss Cheese", "status": "needs_action"}, + {ATTR_RENAME: "Swiss Cheese", ATTR_STATUS: "needs_action"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -385,7 +394,7 @@ async def test_add_item_failure( }, ), ( - {"due_date": "2023-11-18"}, + {ATTR_DUE_DATE: "2023-11-18"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20231118", @@ -402,7 +411,7 @@ async def test_add_item_failure( }, ), ( - {"due_datetime": "2023-11-18T08:30:00-06:00"}, + {ATTR_DUE_DATETIME: "2023-11-18T08:30:00-06:00"}, [ "DESCRIPTION:Any kind will do", "DUE;TZID=America/Regina:20231118T083000", @@ -419,7 +428,7 @@ async def test_add_item_failure( }, ), ( - {"due_datetime": None}, + {ATTR_DUE_DATETIME: None}, [ "DESCRIPTION:Any kind will do", "STATUS:NEEDS-ACTION", @@ -434,7 +443,7 @@ async def test_add_item_failure( }, ), ( - {"description": "Make sure to get Swiss"}, + {ATTR_DESCRIPTION: "Make sure to get Swiss"}, [ "DESCRIPTION:Make sure to get Swiss", "DUE;VALUE=DATE:20171126", @@ -451,7 +460,7 @@ async def test_add_item_failure( }, ), ( - {"description": None}, + {ATTR_DESCRIPTION: None}, ["DUE;VALUE=DATE:20171126", "STATUS:NEEDS-ACTION", "SUMMARY:Cheese"], "1", { @@ -501,12 +510,12 @@ async def test_update_item( await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "Cheese", + ATTR_ITEM: "Cheese", **update_data, }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -520,9 +529,9 @@ async def test_update_item( result = await hass.services.async_call( TODO_DOMAIN, - "get_items", + TodoServices.GET_ITEMS, {}, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, return_response=True, ) @@ -548,12 +557,12 @@ async def test_update_item_failure( with pytest.raises(HomeAssistantError, match="CalDAV save error"): await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "Cheese", - "status": "completed", + ATTR_ITEM: "Cheese", + ATTR_STATUS: "completed", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -582,12 +591,12 @@ async def test_update_item_lookup_failure( with pytest.raises(HomeAssistantError, match=match): await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "Cheese", - "status": "completed", + ATTR_ITEM: "Cheese", + ATTR_STATUS: "completed", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -635,9 +644,9 @@ async def test_remove_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": uids_to_delete}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: uids_to_delete}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -668,9 +677,9 @@ async def test_remove_item_lookup_failure( with pytest.raises(HomeAssistantError, match=match): await 
hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": "Cheese"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "Cheese"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -697,9 +706,9 @@ async def test_remove_item_failure( with pytest.raises(HomeAssistantError, match="CalDAV delete error"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": "Cheese"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "Cheese"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -725,9 +734,9 @@ async def test_remove_item_not_found( with pytest.raises(HomeAssistantError, match="Could not find"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": "Cheese"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "Cheese"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -779,12 +788,12 @@ async def test_subscribe( ] await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "Cheese", - "rename": "Milk", + ATTR_ITEM: "Cheese", + ATTR_RENAME: "Milk", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 1fe933dbe12..7d15bde88c0 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Callable, Generator +from importlib.util import find_spec from pathlib import Path from typing import TYPE_CHECKING, Any from unittest.mock import MagicMock, patch @@ -20,9 +21,9 @@ if TYPE_CHECKING: from .switch.common import MockSwitch -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(scope="session", autouse=find_spec("zeroconf") is not None) def patch_zeroconf_multiple_catcher() -> Generator[None]: - """Patch zeroconf wrapper that detects if multiple instances are used.""" + """If installed, patch zeroconf wrapper that detects if multiple instances are used.""" with patch( "homeassistant.components.zeroconf.install_multiple_zeroconf_catcher", side_effect=lambda zc: None, @@ -123,9 +124,9 @@ def mock_conversation_agent_fixture(hass: HomeAssistant) -> MockAgent: return mock_conversation_agent_fixture_helper(hass) -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(scope="session", autouse=find_spec("ffmpeg") is not None) def prevent_ffmpeg_subprocess() -> Generator[None]: - """Prevent ffmpeg from creating a subprocess.""" + """If installed, prevent ffmpeg from creating a subprocess.""" with patch( "homeassistant.components.ffmpeg.FFVersion.get_version", return_value="6.0" ): diff --git a/tests/components/conversation/test_default_agent_intents.py b/tests/components/conversation/test_default_agent_intents.py index b1c4a6d51af..8be25136df4 100644 --- a/tests/components/conversation/test_default_agent_intents.py +++ b/tests/components/conversation/test_default_agent_intents.py @@ -1,7 +1,9 @@ """Test intents for the default agent.""" +from datetime import datetime from unittest.mock import patch +from freezegun import freeze_time import pytest from homeassistant.components import ( @@ -413,3 +415,28 @@ async def test_todo_add_item_fr( assert mock_handle.call_args.args intent_obj = mock_handle.call_args.args[0] assert intent_obj.slots.get("item", {}).get("value", "").strip() == "farine" + + +@freeze_time(datetime(year=2013, month=9, day=17, hour=1, minute=2)) +async def 
test_date_time( + hass: HomeAssistant, + init_components, +) -> None: + """Test the date and time intents.""" + result = await conversation.async_converse( + hass, "what is the date", None, Context(), None + ) + await hass.async_block_till_done() + + response = result.response + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert response.speech["plain"]["speech"] == "September 17th, 2013" + + result = await conversation.async_converse( + hass, "what time is it", None, Context(), None + ) + await hass.async_block_till_done() + + response = result.response + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert response.speech["plain"]["speech"] == "1:02 AM" diff --git a/tests/components/doorbird/test_config_flow.py b/tests/components/doorbird/test_config_flow.py index cd4ddccda87..17cfa05b49e 100644 --- a/tests/components/doorbird/test_config_flow.py +++ b/tests/components/doorbird/test_config_flow.py @@ -1,14 +1,19 @@ """Test the DoorBird config flow.""" from ipaddress import ip_address -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import AsyncMock, MagicMock, Mock, patch +import aiohttp import pytest -import requests from homeassistant import config_entries from homeassistant.components import zeroconf -from homeassistant.components.doorbird.const import CONF_EVENTS, DOMAIN +from homeassistant.components.doorbird.const import ( + CONF_EVENTS, + DEFAULT_DOORBELL_EVENT, + DEFAULT_MOTION_EVENT, + DOMAIN, +) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -23,20 +28,20 @@ VALID_CONFIG = { } -def _get_mock_doorbirdapi_return_values(ready=None, info=None): +def _get_mock_doorbirdapi_return_values(info=None): doorbirdapi_mock = MagicMock() - type(doorbirdapi_mock).ready = MagicMock(return_value=ready) - type(doorbirdapi_mock).info = MagicMock(return_value=info) - type(doorbirdapi_mock).doorbell_state = MagicMock( - side_effect=requests.exceptions.HTTPError(response=Mock(status_code=401)) + type(doorbirdapi_mock).info = AsyncMock(return_value=info) + type(doorbirdapi_mock).doorbell_state = AsyncMock( + side_effect=aiohttp.ClientResponseError( + request_info=Mock(), history=Mock(), status=401 + ) ) return doorbirdapi_mock -def _get_mock_doorbirdapi_side_effects(ready=None, info=None): +def _get_mock_doorbirdapi_side_effects(info=None): doorbirdapi_mock = MagicMock() - type(doorbirdapi_mock).ready = MagicMock(side_effect=ready) - type(doorbirdapi_mock).info = MagicMock(side_effect=info) + type(doorbirdapi_mock).info = AsyncMock(side_effect=info) return doorbirdapi_mock @@ -50,9 +55,7 @@ async def test_user_form(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - doorbirdapi = _get_mock_doorbirdapi_return_values( - ready=[True], info={"WIFI_MAC_ADDR": "macaddr"} - ) + doorbirdapi = _get_mock_doorbirdapi_return_values(info={"WIFI_MAC_ADDR": "macaddr"}) with ( patch( "homeassistant.components.doorbird.config_flow.DoorBird", @@ -80,6 +83,9 @@ async def test_user_form(hass: HomeAssistant) -> None: "password": "password", "username": "friend", } + assert result2["options"] == { + CONF_EVENTS: [DEFAULT_DOORBELL_EVENT, DEFAULT_MOTION_EVENT] + } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -174,9 +180,7 @@ async def test_form_zeroconf_non_ipv4_ignored(hass: HomeAssistant) -> None: async def 
test_form_zeroconf_correct_oui(hass: HomeAssistant) -> None: """Test we can setup from zeroconf with the correct OUI source.""" - doorbirdapi = _get_mock_doorbirdapi_return_values( - ready=[True], info={"WIFI_MAC_ADDR": "macaddr"} - ) + doorbirdapi = _get_mock_doorbirdapi_return_values(info={"WIFI_MAC_ADDR": "macaddr"}) with patch( "homeassistant.components.doorbird.config_flow.DoorBird", @@ -234,7 +238,7 @@ async def test_form_zeroconf_correct_oui(hass: HomeAssistant) -> None: @pytest.mark.parametrize( "doorbell_state_side_effect", [ - requests.exceptions.HTTPError(response=Mock(status_code=404)), + aiohttp.ClientResponseError(request_info=Mock(), history=Mock(), status=404), OSError, None, ], @@ -243,10 +247,8 @@ async def test_form_zeroconf_correct_oui_wrong_device( hass: HomeAssistant, doorbell_state_side_effect ) -> None: """Test we can setup from zeroconf with the correct OUI source but not a doorstation.""" - doorbirdapi = _get_mock_doorbirdapi_return_values( - ready=[True], info={"WIFI_MAC_ADDR": "macaddr"} - ) - type(doorbirdapi).doorbell_state = MagicMock(side_effect=doorbell_state_side_effect) + doorbirdapi = _get_mock_doorbirdapi_return_values(info={"WIFI_MAC_ADDR": "macaddr"}) + type(doorbirdapi).doorbell_state = AsyncMock(side_effect=doorbell_state_side_effect) with patch( "homeassistant.components.doorbird.config_flow.DoorBird", @@ -276,7 +278,7 @@ async def test_form_user_cannot_connect(hass: HomeAssistant) -> None: DOMAIN, context={"source": config_entries.SOURCE_USER} ) - doorbirdapi = _get_mock_doorbirdapi_side_effects(ready=OSError) + doorbirdapi = _get_mock_doorbirdapi_side_effects(info=OSError) with patch( "homeassistant.components.doorbird.config_flow.DoorBird", return_value=doorbirdapi, @@ -296,8 +298,10 @@ async def test_form_user_invalid_auth(hass: HomeAssistant) -> None: DOMAIN, context={"source": config_entries.SOURCE_USER} ) - mock_error = requests.exceptions.HTTPError(response=Mock(status_code=401)) - doorbirdapi = _get_mock_doorbirdapi_side_effects(ready=mock_error) + mock_error = aiohttp.ClientResponseError( + request_info=Mock(), history=Mock(), status=401 + ) + doorbirdapi = _get_mock_doorbirdapi_side_effects(info=mock_error) with patch( "homeassistant.components.doorbird.config_flow.DoorBird", return_value=doorbirdapi, @@ -336,3 +340,69 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert config_entry.options == {CONF_EVENTS: ["eventa", "eventc", "eventq"]} + + +async def test_reauth(hass: HomeAssistant) -> None: + """Test reauth flow.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: "1.1.1.1", + CONF_NAME: "DoorBird", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + config_entry.add_to_hass(hass) + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) + assert len(flows) == 1 + flow = flows[0] + + mock_error = aiohttp.ClientResponseError( + request_info=Mock(), history=Mock(), status=401 + ) + doorbirdapi = _get_mock_doorbirdapi_side_effects(info=mock_error) + with patch( + "homeassistant.components.doorbird.config_flow.DoorBird", + return_value=doorbirdapi, + ): + result2 = await hass.config_entries.flow.async_configure( + flow["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "invalid_auth"} + + doorbirdapi = 
_get_mock_doorbirdapi_return_values(info={"WIFI_MAC_ADDR": "macaddr"}) + with ( + patch( + "homeassistant.components.doorbird.config_flow.DoorBird", + return_value=doorbirdapi, + ), + patch( + "homeassistant.components.doorbird.async_setup", return_value=True + ) as mock_setup, + patch( + "homeassistant.components.doorbird.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result2 = await hass.config_entries.flow.async_configure( + flow["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_setup.mock_calls) == 1 diff --git a/tests/components/dsmr/test_mbus_migration.py b/tests/components/dsmr/test_mbus_migration.py index 18f5e850ecd..a8b7ef9c356 100644 --- a/tests/components/dsmr/test_mbus_migration.py +++ b/tests/components/dsmr/test_mbus_migration.py @@ -9,7 +9,7 @@ from dsmr_parser.obis_references import ( BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, BELGIUM_MBUS1_METER_READING2, ) -from dsmr_parser.objects import CosemObject, MBusObject +from dsmr_parser.objects import CosemObject, MBusObject, Telegram from homeassistant.components.dsmr.const import DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN @@ -65,22 +65,31 @@ async def test_migrate_gas_to_mbus( assert entity.unique_id == old_unique_id await hass.async_block_till_done() - telegram = { - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + telegram = Telegram() + telegram.add( + BELGIUM_MBUS1_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "003", "unit": ""}]), + "BELGIUM_MBUS1_DEVICE_TYPE", + ) + telegram.add( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - BELGIUM_MBUS1_METER_READING2: MBusObject( - BELGIUM_MBUS1_METER_READING2, + "BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + BELGIUM_MBUS1_METER_READING2, + MBusObject( + (0, 1), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - } + "BELGIUM_MBUS1_METER_READING2", + ) assert await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() @@ -173,22 +182,31 @@ async def test_migrate_gas_to_mbus_exists( ) await hass.async_block_till_done() - telegram = { - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + telegram = Telegram() + telegram.add( + BELGIUM_MBUS1_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "003", "unit": ""}]), + "BELGIUM_MBUS1_DEVICE_TYPE", + ) + telegram.add( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - BELGIUM_MBUS1_METER_READING2: MBusObject( - BELGIUM_MBUS1_METER_READING2, + "BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + BELGIUM_MBUS1_METER_READING2, + MBusObject( + (0, 1), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - } + "BELGIUM_MBUS1_METER_READING2", + ) assert await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() diff --git 
a/tests/components/dsmr/test_sensor.py b/tests/components/dsmr/test_sensor.py index 435594d4eef..a7c4a98be1e 100644 --- a/tests/components/dsmr/test_sensor.py +++ b/tests/components/dsmr/test_sensor.py @@ -37,7 +37,7 @@ from dsmr_parser.obis_references import ( GAS_METER_READING, HOURLY_GAS_METER_READING, ) -from dsmr_parser.objects import CosemObject, MBusObject +from dsmr_parser.objects import CosemObject, MBusObject, Telegram import pytest from homeassistant.components.sensor import ( @@ -80,22 +80,31 @@ async def test_default_setup( "time_between_update": 0, } - telegram = { - CURRENT_ELECTRICITY_USAGE: CosemObject( - CURRENT_ELECTRICITY_USAGE, + telegram = Telegram() + telegram.add( + CURRENT_ELECTRICITY_USAGE, + CosemObject( + (0, 0), [{"value": Decimal("0.0"), "unit": UnitOfPower.WATT}], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - GAS_METER_READING: MBusObject( - GAS_METER_READING, + "CURRENT_ELECTRICITY_USAGE", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) + telegram.add( + GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": UnitOfVolume.CUBIC_METERS}, ], ), - } + "GAS_METER_READING", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -134,22 +143,31 @@ async def test_default_setup( ) assert power_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "W" - telegram = { - CURRENT_ELECTRICITY_USAGE: CosemObject( - CURRENT_ELECTRICITY_USAGE, + telegram = Telegram() + telegram.add( + CURRENT_ELECTRICITY_USAGE, + CosemObject( + (0, 0), [{"value": Decimal("35.0"), "unit": UnitOfPower.WATT}], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - GAS_METER_READING: MBusObject( - GAS_METER_READING, + "CURRENT_ELECTRICITY_USAGE", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) + telegram.add( + GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642214)}, {"value": Decimal(745.701), "unit": UnitOfVolume.CUBIC_METERS}, ], ), - } + "GAS_METER_READING", + ) # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser telegram_callback(telegram) @@ -209,15 +227,20 @@ async def test_setup_only_energy( "time_between_update": 0, } - telegram = { - CURRENT_ELECTRICITY_USAGE: CosemObject( - CURRENT_ELECTRICITY_USAGE, + telegram = Telegram() + telegram.add( + CURRENT_ELECTRICITY_USAGE, + CosemObject( + (0, 0), [{"value": Decimal("35.0"), "unit": UnitOfPower.WATT}], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "CURRENT_ELECTRICITY_USAGE", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -260,18 +283,23 @@ async def test_v4_meter( "time_between_update": 0, } - telegram = { - HOURLY_GAS_METER_READING: MBusObject( - HOURLY_GAS_METER_READING, + telegram = Telegram() + telegram.add( + HOURLY_GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": 
"m3"}, ], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "HOURLY_GAS_METER_READING", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -343,18 +371,23 @@ async def test_v5_meter( "time_between_update": 0, } - telegram = { - HOURLY_GAS_METER_READING: MBusObject( - HOURLY_GAS_METER_READING, + telegram = Telegram() + telegram.add( + HOURLY_GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": value, "unit": "m3"}, ], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "HOURLY_GAS_METER_READING", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -411,23 +444,34 @@ async def test_luxembourg_meter( "time_between_update": 0, } - telegram = { - HOURLY_GAS_METER_READING: MBusObject( - HOURLY_GAS_METER_READING, + telegram = Telegram() + telegram.add( + HOURLY_GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - ELECTRICITY_IMPORTED_TOTAL: CosemObject( - ELECTRICITY_IMPORTED_TOTAL, + "HOURLY_GAS_METER_READING", + ) + telegram.add( + ELECTRICITY_IMPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(123.456), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - ELECTRICITY_EXPORTED_TOTAL: CosemObject( - ELECTRICITY_EXPORTED_TOTAL, + "ELECTRICITY_IMPORTED_TOTAL", + ) + telegram.add( + ELECTRICITY_EXPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(654.321), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - } + "ELECTRICITY_EXPORTED_TOTAL", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -495,78 +539,127 @@ async def test_belgian_meter( "time_between_update": 0, } - telegram = { - BELGIUM_CURRENT_AVERAGE_DEMAND: CosemObject( - BELGIUM_CURRENT_AVERAGE_DEMAND, + telegram = Telegram() + telegram.add( + BELGIUM_CURRENT_AVERAGE_DEMAND, + CosemObject( + (0, 0), [{"value": Decimal(1.75), "unit": "kW"}], ), - BELGIUM_MAXIMUM_DEMAND_MONTH: MBusObject( - BELGIUM_MAXIMUM_DEMAND_MONTH, + "BELGIUM_CURRENT_AVERAGE_DEMAND", + ) + telegram.add( + BELGIUM_MAXIMUM_DEMAND_MONTH, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642218)}, {"value": Decimal(4.11), "unit": "kW"}, ], ), - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + "BELGIUM_MAXIMUM_DEMAND_MONTH", + ) + telegram.add( + BELGIUM_MBUS1_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "003", "unit": ""}]), + "BELGIUM_MBUS1_DEVICE_TYPE", + ) + telegram.add( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - BELGIUM_MBUS1_METER_READING2: MBusObject( - BELGIUM_MBUS1_METER_READING2, + "BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + BELGIUM_MBUS1_METER_READING2, + MBusObject( + (0, 1), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": 
Decimal(745.695), "unit": "m3"}, ], ), - BELGIUM_MBUS2_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + "BELGIUM_MBUS1_METER_READING2", + ) + telegram.add( + BELGIUM_MBUS2_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "007", "unit": ""}]), + "BELGIUM_MBUS2_DEVICE_TYPE", + ) + telegram.add( + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 2), [{"value": "37464C4F32313139303333373332", "unit": ""}], ), - BELGIUM_MBUS2_METER_READING1: MBusObject( - BELGIUM_MBUS2_METER_READING1, + "BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + BELGIUM_MBUS2_METER_READING1, + MBusObject( + (0, 2), [ {"value": datetime.datetime.fromtimestamp(1551642214)}, {"value": Decimal(678.695), "unit": "m3"}, ], ), - BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + "BELGIUM_MBUS2_METER_READING1", + ) + telegram.add( + BELGIUM_MBUS3_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "003", "unit": ""}]), + "BELGIUM_MBUS3_DEVICE_TYPE", + ) + telegram.add( + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 3), [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - BELGIUM_MBUS3_METER_READING2: MBusObject( - BELGIUM_MBUS3_METER_READING2, + "BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + BELGIUM_MBUS3_METER_READING2, + MBusObject( + (0, 3), [ {"value": datetime.datetime.fromtimestamp(1551642215)}, {"value": Decimal(12.12), "unit": "m3"}, ], ), - BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + "BELGIUM_MBUS3_METER_READING2", + ) + telegram.add( + BELGIUM_MBUS4_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "007", "unit": ""}]), + "BELGIUM_MBUS4_DEVICE_TYPE", + ) + telegram.add( + BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 4), [{"value": "37464C4F32313139303333373334", "unit": ""}], ), - BELGIUM_MBUS4_METER_READING1: MBusObject( - BELGIUM_MBUS4_METER_READING1, + "BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + BELGIUM_MBUS4_METER_READING1, + MBusObject( + (0, 4), [ {"value": datetime.datetime.fromtimestamp(1551642216)}, {"value": Decimal(13.13), "unit": "m3"}, ], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "BELGIUM_MBUS4_METER_READING1", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -680,64 +773,103 @@ async def test_belgian_meter_alt( "time_between_update": 0, } - telegram = { - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + telegram = Telegram() + telegram.add( + BELGIUM_MBUS1_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "007", "unit": ""}]), + "BELGIUM_MBUS1_DEVICE_TYPE", + ) + telegram.add( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - BELGIUM_MBUS1_METER_READING1: MBusObject( - BELGIUM_MBUS1_METER_READING1, + "BELGIUM_MBUS1_DEVICE_TYPE", + 
) + telegram.add( + BELGIUM_MBUS1_METER_READING1, + MBusObject( + (0, 1), [ {"value": datetime.datetime.fromtimestamp(1551642215)}, {"value": Decimal(123.456), "unit": "m3"}, ], ), - BELGIUM_MBUS2_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + "BELGIUM_MBUS1_METER_READING1", + ) + telegram.add( + BELGIUM_MBUS2_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "003", "unit": ""}]), + "BELGIUM_MBUS2_DEVICE_TYPE", + ) + telegram.add( + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 2), [{"value": "37464C4F32313139303333373332", "unit": ""}], ), - BELGIUM_MBUS2_METER_READING2: MBusObject( - BELGIUM_MBUS2_METER_READING2, + "BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + BELGIUM_MBUS2_METER_READING2, + MBusObject( + (0, 2), [ {"value": datetime.datetime.fromtimestamp(1551642216)}, {"value": Decimal(678.901), "unit": "m3"}, ], ), - BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS2_METER_READING2, + ) + telegram.add( + BELGIUM_MBUS3_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "007", "unit": ""}]), + "BELGIUM_MBUS3_DEVICE_TYPE", + ) + telegram.add( + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 3), [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - BELGIUM_MBUS3_METER_READING1: MBusObject( - BELGIUM_MBUS3_METER_READING1, + "BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + BELGIUM_MBUS3_METER_READING1, + MBusObject( + (0, 3), [ {"value": datetime.datetime.fromtimestamp(1551642217)}, {"value": Decimal(12.12), "unit": "m3"}, ], ), - BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + "BELGIUM_MBUS3_METER_READING1", + ) + telegram.add( + BELGIUM_MBUS4_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "003", "unit": ""}]), + "BELGIUM_MBUS4_DEVICE_TYPE", + ) + telegram.add( + BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 4), [{"value": "37464C4F32313139303333373334", "unit": ""}], ), - BELGIUM_MBUS4_METER_READING2: MBusObject( - BELGIUM_MBUS4_METER_READING2, + "BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + BELGIUM_MBUS4_METER_READING2, + MBusObject( + (0, 4), [ {"value": datetime.datetime.fromtimestamp(1551642218)}, {"value": Decimal(13.13), "unit": "m3"}, ], ), - } + "BELGIUM_MBUS4_METER_READING2", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -829,49 +961,78 @@ async def test_belgian_meter_mbus( "time_between_update": 0, } - telegram = { - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0003", "unit": ""}] - ), - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "006", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + telegram = Telegram() + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0003", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) + telegram.add( + BELGIUM_MBUS1_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "006", "unit": ""}]), + "BELGIUM_MBUS1_DEVICE_TYPE", + ) + telegram.add( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": 
"37464C4F32313139303333373331", "unit": ""}], ), - BELGIUM_MBUS2_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + "BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + BELGIUM_MBUS2_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "003", "unit": ""}]), + "BELGIUM_MBUS2_DEVICE_TYPE", + ) + telegram.add( + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 2), [{"value": "37464C4F32313139303333373332", "unit": ""}], ), - BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + "BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + BELGIUM_MBUS3_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "007", "unit": ""}]), + "BELGIUM_MBUS3_DEVICE_TYPE", + ) + telegram.add( + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 3), [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - BELGIUM_MBUS3_METER_READING2: MBusObject( - BELGIUM_MBUS3_METER_READING2, + "BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + BELGIUM_MBUS3_METER_READING2, + MBusObject( + (0, 3), [ {"value": datetime.datetime.fromtimestamp(1551642217)}, {"value": Decimal(12.12), "unit": "m3"}, ], ), - BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS4_METER_READING1: MBusObject( - BELGIUM_MBUS4_METER_READING1, + "BELGIUM_MBUS3_METER_READING2", + ) + telegram.add( + BELGIUM_MBUS4_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "007", "unit": ""}]), + "BELGIUM_MBUS4_DEVICE_TYPE", + ) + telegram.add( + BELGIUM_MBUS4_METER_READING1, + MBusObject( + (0, 4), [ {"value": datetime.datetime.fromtimestamp(1551642218)}, {"value": Decimal(13.13), "unit": "m3"}, ], ), - } + "BELGIUM_MBUS4_METER_READING1", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -938,11 +1099,12 @@ async def test_belgian_meter_low( "time_between_update": 0, } - telegram = { - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0002", "unit": ""}] - ) - } + telegram = Telegram() + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0002", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -986,16 +1148,23 @@ async def test_swedish_meter( "time_between_update": 0, } - telegram = { - ELECTRICITY_IMPORTED_TOTAL: CosemObject( - ELECTRICITY_IMPORTED_TOTAL, + telegram = Telegram() + telegram.add( + ELECTRICITY_IMPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(123.456), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - ELECTRICITY_EXPORTED_TOTAL: CosemObject( - ELECTRICITY_EXPORTED_TOTAL, + "ELECTRICITY_IMPORTED_TOTAL", + ) + telegram.add( + ELECTRICITY_EXPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(654.321), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - } + "ELECTRICITY_EXPORTED_TOTAL", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -1054,16 +1223,23 @@ async def test_easymeter( "time_between_update": 0, } - telegram = { - ELECTRICITY_IMPORTED_TOTAL: CosemObject( - ELECTRICITY_IMPORTED_TOTAL, + telegram = Telegram() + telegram.add( + ELECTRICITY_IMPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": 
Decimal(54184.6316), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - ELECTRICITY_EXPORTED_TOTAL: CosemObject( - ELECTRICITY_EXPORTED_TOTAL, + "ELECTRICITY_IMPORTED_TOTAL", + ) + telegram.add( + ELECTRICITY_EXPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(19981.1069), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - } + "ELECTRICITY_EXPORTED_TOTAL", + ) mock_entry = MockConfigEntry( domain="dsmr", @@ -1222,15 +1398,20 @@ async def test_reconnect( "time_between_update": 0, } - telegram = { - CURRENT_ELECTRICITY_USAGE: CosemObject( - CURRENT_ELECTRICITY_USAGE, + telegram = Telegram() + telegram.add( + CURRENT_ELECTRICITY_USAGE, + CosemObject( + (0, 0), [{"value": Decimal("35.0"), "unit": UnitOfPower.WATT}], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "CURRENT_ELECTRICITY_USAGE", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) # mock waiting coroutine while connection lasts closed = asyncio.Event() @@ -1300,15 +1481,18 @@ async def test_gas_meter_providing_energy_reading( "time_between_update": 0, } - telegram = { - GAS_METER_READING: MBusObject( - GAS_METER_READING, + telegram = Telegram() + telegram.add( + GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(123.456), "unit": UnitOfEnergy.GIGA_JOULE}, ], ), - } + "GAS_METER_READING", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options diff --git a/tests/components/google_sheets/test_init.py b/tests/components/google_sheets/test_init.py index 014e89349e2..700783a2e30 100644 --- a/tests/components/google_sheets/test_init.py +++ b/tests/components/google_sheets/test_init.py @@ -214,6 +214,32 @@ async def test_append_sheet( assert len(mock_client.mock_calls) == 8 +async def test_append_sheet_multiple_rows( + hass: HomeAssistant, + setup_integration: ComponentSetup, + config_entry: MockConfigEntry, +) -> None: + """Test service call appending to a sheet.""" + await setup_integration() + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + assert entries[0].state is ConfigEntryState.LOADED + + with patch("homeassistant.components.google_sheets.Client") as mock_client: + await hass.services.async_call( + DOMAIN, + "append_sheet", + { + "config_entry": config_entry.entry_id, + "worksheet": "Sheet1", + "data": [{"foo": "bar"}, {"foo": "bar2"}], + }, + blocking=True, + ) + assert len(mock_client.mock_calls) == 8 + + async def test_append_sheet_api_error( hass: HomeAssistant, setup_integration: ComponentSetup, diff --git a/tests/components/google_tasks/test_todo.py b/tests/components/google_tasks/test_todo.py index afbaabe5cd0..b0ee135d4a9 100644 --- a/tests/components/google_tasks/test_todo.py +++ b/tests/components/google_tasks/test_todo.py @@ -10,8 +10,16 @@ from httplib2 import Response import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN -from homeassistant.const import Platform +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -376,8 +384,8 @@ async def 
test_task_items_error_response( ("api_responses", "item_data"), [ (CREATE_API_RESPONSES, {}), - (CREATE_API_RESPONSES, {"due_date": "2023-11-18"}), - (CREATE_API_RESPONSES, {"description": "6-pack"}), + (CREATE_API_RESPONSES, {ATTR_DUE_DATE: "2023-11-18"}), + (CREATE_API_RESPONSES, {ATTR_DESCRIPTION: "6-pack"}), ], ids=["summary", "due", "description"], ) @@ -399,9 +407,9 @@ async def test_create_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Soda", **item_data}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda", **item_data}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -439,9 +447,9 @@ async def test_create_todo_list_item_error( with pytest.raises(HomeAssistantError, match="Invalid task ID"): await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Soda"}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -464,9 +472,9 @@ async def test_update_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "some-task-id", "rename": "Soda", "status": "completed"}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "some-task-id", ATTR_RENAME: "Soda", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -504,9 +512,9 @@ async def test_update_todo_list_item_error( with pytest.raises(HomeAssistantError, match="Invalid task ID"): await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "some-task-id", "rename": "Soda", "status": "completed"}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "some-task-id", ATTR_RENAME: "Soda", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -514,12 +522,12 @@ async def test_update_todo_list_item_error( @pytest.mark.parametrize( ("api_responses", "item_data"), [ - (UPDATE_API_RESPONSES, {"rename": "Soda"}), - (UPDATE_API_RESPONSES, {"due_date": "2023-11-18"}), - (UPDATE_API_RESPONSES, {"due_date": None}), - (UPDATE_API_RESPONSES, {"description": "At least one gallon"}), - (UPDATE_API_RESPONSES, {"description": ""}), - (UPDATE_API_RESPONSES, {"description": None}), + (UPDATE_API_RESPONSES, {ATTR_RENAME: "Soda"}), + (UPDATE_API_RESPONSES, {ATTR_DUE_DATE: "2023-11-18"}), + (UPDATE_API_RESPONSES, {ATTR_DUE_DATE: None}), + (UPDATE_API_RESPONSES, {ATTR_DESCRIPTION: "At least one gallon"}), + (UPDATE_API_RESPONSES, {ATTR_DESCRIPTION: ""}), + (UPDATE_API_RESPONSES, {ATTR_DESCRIPTION: None}), ], ids=( "rename", @@ -548,9 +556,9 @@ async def test_partial_update( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "some-task-id", **item_data}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "some-task-id", **item_data}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -578,9 +586,9 @@ async def test_partial_update_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "some-task-id", "status": "needs_action"}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "some-task-id", ATTR_STATUS: "needs_action"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert 
len(mock_http_response.call_args_list) == 4 @@ -622,9 +630,9 @@ async def test_delete_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -670,9 +678,9 @@ async def test_delete_partial_failure( with pytest.raises(HomeAssistantError, match="Invalid task ID"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -711,9 +719,9 @@ async def test_delete_invalid_json_response( with pytest.raises(HomeAssistantError, match="unexpected response"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["some-task-id-1"]}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["some-task-id-1"]}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -750,9 +758,9 @@ async def test_delete_server_error( with pytest.raises(HomeAssistantError, match="responded with error"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["some-task-id-1"]}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["some-task-id-1"]}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -942,9 +950,9 @@ async def test_susbcribe( # Rename item await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": uid, "rename": "Milk"}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: uid, ATTR_RENAME: "Milk"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) diff --git a/tests/components/group/test_button.py b/tests/components/group/test_button.py new file mode 100644 index 00000000000..c3f4a720d53 --- /dev/null +++ b/tests/components/group/test_button.py @@ -0,0 +1,122 @@ +"""The tests for the group button platform.""" + +from freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.group import DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util + + +async def test_default_state( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test button group default state.""" + hass.states.async_set("button.notify_light", "2021-01-01T23:59:59.123+00:00") + await async_setup_component( + hass, + BUTTON_DOMAIN, + { + BUTTON_DOMAIN: { + "platform": DOMAIN, + "entities": ["button.notify_light", "button.self_destruct"], + "name": "Button group", + "unique_id": "unique_identifier", + } + }, + ) + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + state = hass.states.get("button.button_group") + assert state is not None + assert state.state == STATE_UNKNOWN + assert state.attributes.get(ATTR_ENTITY_ID) == [ + 
"button.notify_light", + "button.self_destruct", + ] + + entry = entity_registry.async_get("button.button_group") + assert entry + assert entry.unique_id == "unique_identifier" + + +async def test_state_reporting(hass: HomeAssistant) -> None: + """Test the state reporting. + + The group state is unavailable if all group members are unavailable. + Otherwise, the group state represents the last time the grouped button was pressed. + """ + await async_setup_component( + hass, + BUTTON_DOMAIN, + { + BUTTON_DOMAIN: { + "platform": DOMAIN, + "entities": ["button.test1", "button.test2"], + } + }, + ) + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + # Initial state with no group member in the state machine -> unavailable + assert hass.states.get("button.button_group").state == STATE_UNAVAILABLE + + # All group members unavailable -> unavailable + hass.states.async_set("button.test1", STATE_UNAVAILABLE) + hass.states.async_set("button.test2", STATE_UNAVAILABLE) + await hass.async_block_till_done() + assert hass.states.get("button.button_group").state == STATE_UNAVAILABLE + + # All group members available, but no group member pressed -> unknown + hass.states.async_set("button.test1", "2021-01-01T23:59:59.123+00:00") + hass.states.async_set("button.test2", "2022-02-02T23:59:59.123+00:00") + await hass.async_block_till_done() + assert hass.states.get("button.button_group").state == STATE_UNKNOWN + + +@pytest.mark.usefixtures("enable_custom_integrations") +async def test_service_calls( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: + """Test service calls.""" + await async_setup_component( + hass, + BUTTON_DOMAIN, + { + BUTTON_DOMAIN: [ + {"platform": "demo"}, + { + "platform": DOMAIN, + "entities": [ + "button.push", + "button.self_destruct", + ], + }, + ] + }, + ) + await hass.async_block_till_done() + + assert hass.states.get("button.button_group").state == STATE_UNKNOWN + assert hass.states.get("button.push").state == STATE_UNKNOWN + + now = dt_util.parse_datetime("2021-01-09 12:00:00+00:00") + freezer.move_to(now) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.button_group"}, + blocking=True, + ) + + assert hass.states.get("button.button_group").state == now.isoformat() + assert hass.states.get("button.push").state == now.isoformat() diff --git a/tests/components/group/test_config_flow.py b/tests/components/group/test_config_flow.py index c6ee4ae5a87..dc40b647e2e 100644 --- a/tests/components/group/test_config_flow.py +++ b/tests/components/group/test_config_flow.py @@ -29,6 +29,7 @@ from tests.typing import WebSocketGenerator [ ("binary_sensor", "on", "on", {}, {}, {"all": False}, {}), ("binary_sensor", "on", "on", {}, {"all": True}, {"all": True}, {}), + ("button", STATE_UNKNOWN, "2021-01-01T23:59:59.123+00:00", {}, {}, {}, {}), ("cover", "open", "open", {}, {}, {}, {}), ( "event", @@ -135,6 +136,7 @@ async def test_config_flow( ("group_type", "extra_input"), [ ("binary_sensor", {"all": False}), + ("button", {}), ("cover", {}), ("event", {}), ("fan", {}), @@ -212,6 +214,7 @@ def get_suggested(schema, key): ("group_type", "member_state", "extra_options", "options_options"), [ ("binary_sensor", "on", {"all": False}, {}), + ("button", "2021-01-01T23:59:59.123+00:00", {}, {}), ("cover", "open", {}, {}), ("event", "2021-01-01T23:59:59.123+00:00", {}, {}), ("fan", "on", {}, {}), @@ -396,6 +399,7 @@ async def test_all_options( ("group_type", "extra_input"), [ ("binary_sensor", 
{"all": False}), + ("button", {}), ("cover", {}), ("event", {}), ("fan", {}), @@ -491,6 +495,7 @@ SENSOR_ATTRS = [{"icon": "mdi:calculator"}, {"max_entity_id": "sensor.input_two" ("domain", "extra_user_input", "input_states", "group_state", "extra_attributes"), [ ("binary_sensor", {"all": True}, ["on", "off"], "off", [{}, {}]), + ("button", {}, ["", ""], "unknown", [{}, {}]), ("cover", {}, ["open", "closed"], "open", COVER_ATTRS), ("event", {}, ["", ""], "unknown", EVENT_ATTRS), ("fan", {}, ["on", "off"], "on", FAN_ATTRS), @@ -600,6 +605,7 @@ async def test_config_flow_preview( ), [ ("binary_sensor", {"all": True}, {"all": False}, ["on", "off"], "on", [{}, {}]), + ("button", {}, {}, ["", ""], "unknown", [{}, {}]), ("cover", {}, {}, ["open", "closed"], "open", COVER_ATTRS), ("event", {}, {}, ["", ""], "unknown", EVENT_ATTRS), ("fan", {}, {}, ["on", "off"], "on", FAN_ATTRS), diff --git a/tests/components/history_stats/test_init.py b/tests/components/history_stats/test_init.py index 180bb67e02a..4cd999ba31c 100644 --- a/tests/components/history_stats/test_init.py +++ b/tests/components/history_stats/test_init.py @@ -2,9 +2,17 @@ from __future__ import annotations +from homeassistant.components.history_stats.const import ( + CONF_END, + CONF_START, + DEFAULT_NAME, + DOMAIN as HISTORY_STATS_DOMAIN, +) from homeassistant.components.recorder import Recorder from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_ENTITY_ID, CONF_NAME, CONF_STATE, CONF_TYPE from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry @@ -18,3 +26,93 @@ async def test_unload_entry( assert await hass.config_entries.async_unload(loaded_entry.entry_id) await hass.async_block_till_done() assert loaded_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_device_cleaning( + recorder_mock: Recorder, + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the cleaning of devices linked to the helper History stats.""" + + # Source entity device config entry + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + + # Device entry of the source entity + source_device1_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("binary_sensor", "identifier_test1")}, + connections={("mac", "30:31:32:33:34:01")}, + ) + + # Source entity registry + source_entity = entity_registry.async_get_or_create( + "binary_sensor", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device1_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("binary_sensor.test_source") is not None + + # Configure the configuration entry for History stats + history_stats_config_entry = MockConfigEntry( + data={}, + domain=HISTORY_STATS_DOMAIN, + options={ + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_source", + CONF_STATE: ["on"], + CONF_TYPE: "count", + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + }, + title="History stats", + ) + history_stats_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(history_stats_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the History stats sensor + history_stats_entity = entity_registry.async_get("sensor.history_stats") + 
assert history_stats_entity is not None + assert history_stats_entity.device_id == source_entity.device_id + + # Device entry incorrectly linked to History stats config entry + device_registry.async_get_or_create( + config_entry_id=history_stats_config_entry.entry_id, + identifiers={("sensor", "identifier_test2")}, + connections={("mac", "30:31:32:33:34:02")}, + ) + device_registry.async_get_or_create( + config_entry_id=history_stats_config_entry.entry_id, + identifiers={("sensor", "identifier_test3")}, + connections={("mac", "30:31:32:33:34:03")}, + ) + await hass.async_block_till_done() + + # Before reloading the config entry, three devices are expected to be linked + devices_before_reload = device_registry.devices.get_devices_for_config_entry_id( + history_stats_config_entry.entry_id + ) + assert len(devices_before_reload) == 3 + + # Config entry reload + await hass.config_entries.async_reload(history_stats_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the History stats sensor + history_stats_entity = entity_registry.async_get("sensor.history_stats") + assert history_stats_entity is not None + assert history_stats_entity.device_id == source_entity.device_id + + # After reloading the config entry, only one linked device is expected + devices_after_reload = device_registry.devices.get_devices_for_config_entry_id( + history_stats_config_entry.entry_id + ) + assert len(devices_after_reload) == 1 + + assert devices_after_reload[0].id == source_device1_entry.id diff --git a/tests/components/history_stats/test_sensor.py b/tests/components/history_stats/test_sensor.py index 870c98503b4..f86c04b3e5b 100644 --- a/tests/components/history_stats/test_sensor.py +++ b/tests/components/history_stats/test_sensor.py @@ -8,15 +8,28 @@ import pytest import voluptuous as vol from homeassistant import config as hass_config -from homeassistant.components.history_stats.const import DOMAIN +from homeassistant.components.history_stats.const import ( + CONF_END, + CONF_START, + DEFAULT_NAME, + DOMAIN, +) from homeassistant.components.history_stats.sensor import ( PLATFORM_SCHEMA as SENSOR_SCHEMA, ) from homeassistant.components.recorder import Recorder -from homeassistant.const import ATTR_DEVICE_CLASS, SERVICE_RELOAD, STATE_UNKNOWN +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + CONF_ENTITY_ID, + CONF_NAME, + CONF_STATE, + CONF_TYPE, + SERVICE_RELOAD, + STATE_UNKNOWN, +) import homeassistant.core as ha from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -1736,3 +1749,50 @@ async def test_unique_id( entity_registry.async_get("sensor.test").unique_id == "some_history_stats_unique_id" ) + + +async def test_device_id( + recorder_mock: Recorder, + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test for source entity device for History stats.""" + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + source_device_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("sensor", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + source_entity = 
entity_registry.async_get_or_create( + "binary_sensor", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("binary_sensor.test_source") is not None + + history_stats_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_source", + CONF_STATE: ["on"], + CONF_TYPE: "count", + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + }, + title="History stats", + ) + history_stats_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(history_stats_config_entry.entry_id) + await hass.async_block_till_done() + + history_stats_entity = entity_registry.async_get("sensor.history_stats") + assert history_stats_entity is not None + assert history_stats_entity.device_id == source_entity.device_id diff --git a/tests/components/homekit/test_type_cameras.py b/tests/components/homekit/test_type_cameras.py index 69f76006163..a32656e9f2b 100644 --- a/tests/components/homekit/test_type_cameras.py +++ b/tests/components/homekit/test_type_cameras.py @@ -31,7 +31,13 @@ from homeassistant.components.homekit.const import ( ) from homeassistant.components.homekit.type_cameras import Camera from homeassistant.components.homekit.type_switches import Switch -from homeassistant.const import ATTR_DEVICE_CLASS, STATE_OFF, STATE_ON, STATE_UNKNOWN +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + STATE_OFF, + STATE_ON, + STATE_UNAVAILABLE, + STATE_UNKNOWN, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component @@ -883,6 +889,54 @@ async def test_camera_with_linked_motion_event(hass: HomeAssistant, run_driver) await hass.async_block_till_done() assert char.value is False + # Ensure re-adding does not fire an event + hass.states.async_set( + motion_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION, "other": "attr"}, + ) + await hass.async_block_till_done() + assert not broker.mock_calls + + # But a second update does + broker.reset_mock() + hass.states.async_set( + motion_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, + ) + await hass.async_block_till_done() + assert broker.mock_calls + + # Now go unavailable + broker.reset_mock() + hass.states.async_set( + motion_entity_id, + STATE_UNAVAILABLE, + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, + ) + await hass.async_block_till_done() + assert not broker.mock_calls + + # Going from unavailable to a state should not fire an event + hass.states.async_set( + motion_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, + ) + await hass.async_block_till_done() + assert not broker.mock_calls + + # But another update does + broker.reset_mock() + hass.states.async_set( + motion_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION, "other": "attr"}, + ) + await hass.async_block_till_done() + assert broker.mock_calls + + async def test_camera_with_a_missing_linked_motion_sensor( hass: HomeAssistant, run_driver @@ -1148,6 +1202,35 @@ async def test_camera_with_linked_doorbell_event( assert char.value is None assert char2.value is None + await hass.async_block_till_done() + hass.states.async_set( + doorbell_entity_id, + STATE_UNAVAILABLE, + {ATTR_DEVICE_CLASS: 
EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + # Going unavailable should not fire an event + assert not broker.mock_calls + broker.reset_mock() + + # Going from unavailable to a state should not fire an event + hass.states.async_set( + doorbell_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert not broker.mock_calls + + # But a second update does + hass.states.async_set( + doorbell_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert broker.mock_calls + + async def test_camera_with_a_missing_linked_doorbell_sensor( hass: HomeAssistant, run_driver diff --git a/tests/components/israel_rail/__init__.py b/tests/components/israel_rail/__init__.py new file mode 100644 index 00000000000..23cf9f5a821 --- /dev/null +++ b/tests/components/israel_rail/__init__.py @@ -0,0 +1,28 @@ +"""Tests for the israel_rail component.""" + +from datetime import timedelta + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.israel_rail.const import DEFAULT_SCAN_INTERVAL +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def init_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Set up the israel rail integration in Home Assistant.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +async def goto_future(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> None: + """Move time forward past the next scan interval.""" + freezer.tick(DEFAULT_SCAN_INTERVAL + timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() diff --git a/tests/components/israel_rail/conftest.py b/tests/components/israel_rail/conftest.py new file mode 100644 index 00000000000..ba90cebe0a0 --- /dev/null +++ b/tests/components/israel_rail/conftest.py @@ -0,0 +1,137 @@ +"""Configuration for Israel rail tests.""" + +from datetime import datetime +from unittest.mock import AsyncMock, patch +from zoneinfo import ZoneInfo + +from israelrailapi.api import TrainRoute +import pytest +from typing_extensions import Generator + +from homeassistant.components.israel_rail import CONF_DESTINATION, CONF_START, DOMAIN + +from tests.common import MockConfigEntry + +VALID_CONFIG = { + CONF_START: "באר יעקב", + CONF_DESTINATION: "אשקלון", +} + +SOURCE_DEST = "באר יעקב אשקלון" + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.israel_rail.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data=VALID_CONFIG, + unique_id=SOURCE_DEST, + ) + + +@pytest.fixture
def mock_israelrail() -> AsyncMock: + """Build a fixture for the Israel rail API.""" + with ( + patch( + "homeassistant.components.israel_rail.TrainSchedule", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.israel_rail.config_flow.TrainSchedule", + new=mock_client, + ), + ): + client = mock_client.return_value + client.query.return_value = TRAINS + + yield client + + +def get_time(hour: int, minute: int) -> str: + """Return a time in isoformat.""" + return datetime(2021, 10, 10, 
hour, minute, 10, tzinfo=ZoneInfo("UTC")).isoformat() + + +def get_train_route( + train_number: str = "1234", + departure_time: str = "2021-10-10T10:10:10", + arrival_time: str = "2021-10-10T10:10:10", + origin_platform: str = "1", + dest_platform: str = "2", + origin_station: str = "3500", + destination_station: str = "3700", +) -> TrainRoute: + """Build a TrainRoute of the israelrail API.""" + return TrainRoute( + [ + { + "orignStation": origin_station, + "destinationStation": destination_station, + "departureTime": departure_time, + "arrivalTime": arrival_time, + "originPlatform": origin_platform, + "destPlatform": dest_platform, + "trainNumber": train_number, + } + ] + ) + + +TRAINS = [ + get_train_route( + train_number="1234", + departure_time=get_time(10, 10), + arrival_time=get_time(10, 30), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), + get_train_route( + train_number="1235", + departure_time=get_time(10, 20), + arrival_time=get_time(10, 40), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), + get_train_route( + train_number="1236", + departure_time=get_time(10, 30), + arrival_time=get_time(10, 50), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), + get_train_route( + train_number="1237", + departure_time=get_time(10, 40), + arrival_time=get_time(11, 00), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), + get_train_route( + train_number="1238", + departure_time=get_time(10, 50), + arrival_time=get_time(11, 10), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), +] diff --git a/tests/components/israel_rail/snapshots/test_sensor.ambr b/tests/components/israel_rail/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..9806ecb1fae --- /dev/null +++ b/tests/components/israel_rail/snapshots/test_sensor.ambr @@ -0,0 +1,571 @@ +# serializer version: 1 +# name: test_valid_config[sensor.mock_title_departure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_departure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Departure', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'departure0', + 'unique_id': 'באר יעקב אשקלון_departure', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_departure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Departure', + }), + 'context': , + 'entity_id': 'sensor.mock_title_departure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:10:10+00:00', + }) +# --- +# name: test_valid_config[sensor.mock_title_departure_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.mock_title_departure_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Departure +1', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'departure1', + 'unique_id': 'באר יעקב אשקלון_departure1', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_departure_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Departure +1', + }), + 'context': , + 'entity_id': 'sensor.mock_title_departure_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:20:10+00:00', + }) +# --- +# name: test_valid_config[sensor.mock_title_departure_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_departure_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Departure +2', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'departure2', + 'unique_id': 'באר יעקב אשקלון_departure2', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_departure_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Departure +2', + }), + 'context': , + 'entity_id': 'sensor.mock_title_departure_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:30:10+00:00', + }) +# --- +# name: test_valid_config[sensor.mock_title_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'platform', + 'unique_id': 'באר יעקב אשקלון_platform', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_none-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title None', + }), + 'context': , + 'entity_id': 'sensor.mock_title_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_valid_config[sensor.mock_title_none_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 
'entity_id': 'sensor.mock_title_none_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'trains', + 'unique_id': 'באר יעקב אשקלון_trains', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_none_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title None', + }), + 'context': , + 'entity_id': 'sensor.mock_title_none_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_valid_config[sensor.mock_title_none_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_none_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'train_number', + 'unique_id': 'באר יעקב אשקלון_train_number', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_none_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title None', + }), + 'context': , + 'entity_id': 'sensor.mock_title_none_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1234', + }) +# --- +# name: test_valid_config[sensor.mock_title_platform-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_platform', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Platform', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'platform', + 'unique_id': 'באר יעקב אשקלון_platform', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_platform-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title Platform', + }), + 'context': , + 'entity_id': 'sensor.mock_title_platform', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_valid_config[sensor.mock_title_timestamp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_timestamp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
}), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timestamp', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'departure0', + 'unique_id': 'באר יעקב אשקלון_departure', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_timestamp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Timestamp', + }), + 'context': , + 'entity_id': 'sensor.mock_title_timestamp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:10:10+00:00', + }) +# --- +# name: test_valid_config[sensor.mock_title_timestamp_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_timestamp_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timestamp', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'departure1', + 'unique_id': 'באר יעקב אשקלון_departure1', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_timestamp_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Timestamp', + }), + 'context': , + 'entity_id': 'sensor.mock_title_timestamp_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:20:10+00:00', + }) +# --- +# name: test_valid_config[sensor.mock_title_timestamp_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_timestamp_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timestamp', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'departure2', + 'unique_id': 'באר יעקב אשקלון_departure2', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_timestamp_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Timestamp', + }), + 'context': , + 'entity_id': 'sensor.mock_title_timestamp_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:30:10+00:00', + }) +# --- +# name: test_valid_config[sensor.mock_title_train_number-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_train_number', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Train number', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'train_number', + 'unique_id': 'באר יעקב אשקלון_train_number', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_train_number-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title Train number', + }), + 'context': , + 'entity_id': 'sensor.mock_title_train_number', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1234', + }) +# --- +# name: test_valid_config[sensor.mock_title_trains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_trains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Trains', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'trains', + 'unique_id': 'באר יעקב אשקלון_trains', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_trains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title Trains', + }), + 'context': , + 'entity_id': 'sensor.mock_title_trains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- diff --git a/tests/components/israel_rail/test_config_flow.py b/tests/components/israel_rail/test_config_flow.py new file mode 100644 index 00000000000..a27d9b3420b --- /dev/null +++ b/tests/components/israel_rail/test_config_flow.py @@ -0,0 +1,87 @@ +"""Define tests for the israel rail config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.israel_rail import CONF_DESTINATION, CONF_START, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import VALID_CONFIG + +from tests.common import MockConfigEntry + + +async def test_create_entry( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_israelrail: AsyncMock +) -> None: + """Test that the user step works.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "באר יעקב אשקלון" + assert result["data"] == { + CONF_START: "באר יעקב", + CONF_DESTINATION: "אשקלון", + } + + +async def test_flow_fails( + hass: HomeAssistant, + mock_israelrail: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test that the user step fails.""" + mock_israelrail.query.side_effect = Exception("error") + failed_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data=VALID_CONFIG, + ) + + assert failed_result["errors"] == 
{"base": "unknown"} + assert failed_result["type"] is FlowResultType.FORM + + mock_israelrail.query.side_effect = None + + result = await hass.config_entries.flow.async_configure( + failed_result["flow_id"], + VALID_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "באר יעקב אשקלון" + assert result["data"] == { + CONF_START: "באר יעקב", + CONF_DESTINATION: "אשקלון", + } + + +async def test_flow_already_configured( + hass: HomeAssistant, + mock_israelrail: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, +) -> None: + """Test that the user step fails when the entry is already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result_aborted = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + + assert result_aborted["type"] is FlowResultType.ABORT + assert result_aborted["reason"] == "already_configured" diff --git a/tests/components/israel_rail/test_init.py b/tests/components/israel_rail/test_init.py new file mode 100644 index 00000000000..c4dd4e5721e --- /dev/null +++ b/tests/components/israel_rail/test_init.py @@ -0,0 +1,22 @@ +"""Test init of israel_rail integration.""" + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import init_integration + +from tests.common import MockConfigEntry + + +async def test_invalid_config( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_israelrail: AsyncMock, +) -> None: + """Ensure nothing is created when config is wrong.""" + mock_israelrail.query.side_effect = Exception("error") + await init_integration(hass, mock_config_entry) + assert not hass.states.async_entity_ids("sensor") + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/israel_rail/test_sensor.py b/tests/components/israel_rail/test_sensor.py new file mode 100644 index 00000000000..d044dfe1d7c --- /dev/null +++ b/tests/components/israel_rail/test_sensor.py @@ -0,0 +1,69 @@ +"""Tests for the israel_rail sensor.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import goto_future, init_integration +from .conftest import TRAINS, get_time + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_valid_config( + hass: HomeAssistant, + mock_israelrail: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Ensure everything starts correctly.""" + await init_integration(hass, mock_config_entry) + assert len(hass.states.async_entity_ids()) == 6 + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_update_train( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_israelrail: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Ensure the train data is updated.""" + await init_integration(hass, mock_config_entry) + assert len(hass.states.async_entity_ids()) == 6 + departure_sensor = hass.states.get("sensor.mock_title_departure") + expected_time = get_time(10, 10) + assert departure_sensor.state == expected_time + + mock_israelrail.query.return_value = TRAINS[1:] + + await goto_future(hass, freezer) + + assert len(hass.states.async_entity_ids()) == 6 + departure_sensor = hass.states.get("sensor.mock_title_departure") + expected_time = get_time(10, 20) + assert departure_sensor.state == expected_time + + +async def test_fail_query( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_israelrail: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Ensure the integration handles query failures.""" + await init_integration(hass, mock_config_entry) + assert len(hass.states.async_entity_ids()) == 6 + mock_israelrail.query.side_effect = Exception("error") + await goto_future(hass, freezer) + assert len(hass.states.async_entity_ids()) == 6 + departure_sensor = hass.states.get("sensor.mock_title_departure") + assert departure_sensor.state == STATE_UNAVAILABLE diff --git a/tests/components/jewish_calendar/test_config_flow.py b/tests/components/jewish_calendar/test_config_flow.py index 3189571a5a7..466d3a1e4f0 100644 --- a/tests/components/jewish_calendar/test_config_flow.py +++ b/tests/components/jewish_calendar/test_config_flow.py @@ -9,6 +9,7 @@ from homeassistant.components.jewish_calendar.const import ( CONF_CANDLE_LIGHT_MINUTES, CONF_DIASPORA, CONF_HAVDALAH_OFFSET_MINUTES, + DEFAULT_CANDLE_LIGHT, DEFAULT_DIASPORA, DEFAULT_LANGUAGE, DOMAIN, @@ -138,3 +139,28 @@ async def test_options(hass: HomeAssistant, mock_config_entry: MockConfigEntry) assert len(entries) == 1 assert entries[0].options[CONF_CANDLE_LIGHT_MINUTES] == 25 assert entries[0].options[CONF_HAVDALAH_OFFSET_MINUTES] == 34 + + +async def test_options_reconfigure( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test that updating the options of the Jewish Calendar integration triggers a value update.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert CONF_CANDLE_LIGHT_MINUTES not in mock_config_entry.options + + # Update the CONF_CANDLE_LIGHT_MINUTES option to a new value + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_CANDLE_LIGHT_MINUTES: DEFAULT_CANDLE_LIGHT + 1, + }, + ) + assert result["result"] + + # The value of the "upcoming_shabbat_candle_lighting" sensor should be the new value + assert ( + 
mock_config_entry.options[CONF_CANDLE_LIGHT_MINUTES] == DEFAULT_CANDLE_LIGHT + 1 + ) diff --git a/tests/components/knocki/test_config_flow.py b/tests/components/knocki/test_config_flow.py index baf43c3ad30..188175035da 100644 --- a/tests/components/knocki/test_config_flow.py +++ b/tests/components/knocki/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from knocki import KnockiConnectionError +from knocki import KnockiConnectionError, KnockiInvalidAuthError import pytest from homeassistant.components.knocki.const import DOMAIN @@ -72,7 +72,11 @@ async def test_duplcate_entry( @pytest.mark.parametrize(("field"), ["login", "link"]) @pytest.mark.parametrize( ("exception", "error"), - [(KnockiConnectionError, "cannot_connect"), (Exception, "unknown")], + [ + (KnockiConnectionError, "cannot_connect"), + (KnockiInvalidAuthError, "invalid_auth"), + (Exception, "unknown"), + ], ) async def test_exceptions( hass: HomeAssistant, diff --git a/tests/components/lifx/__init__.py b/tests/components/lifx/__init__.py index 505d212a352..4834e486ec0 100644 --- a/tests/components/lifx/__init__.py +++ b/tests/components/lifx/__init__.py @@ -172,6 +172,19 @@ def _mocked_tile() -> Light: bulb.effect = {"effect": "OFF"} bulb.get_tile_effect = MockLifxCommand(bulb) bulb.set_tile_effect = MockLifxCommand(bulb) + bulb.get64 = MockLifxCommand(bulb) + bulb.get_device_chain = MockLifxCommand(bulb) + return bulb + + +def _mocked_ceiling() -> Light: + bulb = _mocked_bulb() + bulb.product = 176 # LIFX Ceiling + bulb.effect = {"effect": "OFF"} + bulb.get_tile_effect = MockLifxCommand(bulb) + bulb.set_tile_effect = MockLifxCommand(bulb) + bulb.get64 = MockLifxCommand(bulb) + bulb.get_device_chain = MockLifxCommand(bulb) return bulb diff --git a/tests/components/lifx/test_light.py b/tests/components/lifx/test_light.py index 56630053cc0..9972bc1021a 100644 --- a/tests/components/lifx/test_light.py +++ b/tests/components/lifx/test_light.py @@ -11,15 +11,19 @@ from homeassistant.components.lifx import DOMAIN from homeassistant.components.lifx.const import ATTR_POWER from homeassistant.components.lifx.light import ATTR_INFRARED, ATTR_ZONES from homeassistant.components.lifx.manager import ( + ATTR_CLOUD_SATURATION_MAX, + ATTR_CLOUD_SATURATION_MIN, ATTR_DIRECTION, ATTR_PALETTE, ATTR_SATURATION_MAX, ATTR_SATURATION_MIN, + ATTR_SKY_TYPE, ATTR_SPEED, ATTR_THEME, SERVICE_EFFECT_COLORLOOP, SERVICE_EFFECT_MORPH, SERVICE_EFFECT_MOVE, + SERVICE_EFFECT_SKY, ) from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -62,6 +66,7 @@ from . 
import ( _mocked_brightness_bulb, _mocked_bulb, _mocked_bulb_new_firmware, + _mocked_ceiling, _mocked_clean_bulb, _mocked_light_strip, _mocked_tile, @@ -691,6 +696,7 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: entity_id = "light.my_bulb" + # FLAME effect test await hass.services.async_call( LIGHT_DOMAIN, "turn_on", @@ -707,11 +713,15 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: "effect": 3, "speed": 3, "palette": [], + "sky_type": None, + "cloud_saturation_min": None, + "cloud_saturation_max": None, } bulb.get_tile_effect.reset_mock() bulb.set_tile_effect.reset_mock() bulb.set_power.reset_mock() + # MORPH effect tests bulb.power_level = 0 await hass.services.async_call( DOMAIN, @@ -750,6 +760,9 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: (8920, 65535, 32768, 3500), (10558, 65535, 32768, 3500), ], + "sky_type": None, + "cloud_saturation_min": None, + "cloud_saturation_max": None, } bulb.get_tile_effect.reset_mock() bulb.set_tile_effect.reset_mock() @@ -808,6 +821,140 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: (43690, 65535, 65535, 3500), (54613, 65535, 65535, 3500), ], + "sky_type": None, + "cloud_saturation_min": None, + "cloud_saturation_max": None, + } + bulb.get_tile_effect.reset_mock() + bulb.set_tile_effect.reset_mock() + bulb.set_power.reset_mock() + + +@pytest.mark.usefixtures("mock_discovery") +async def test_sky_effect(hass: HomeAssistant) -> None: + """Test the firmware sky effect on a ceiling device.""" + config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=SERIAL + ) + config_entry.add_to_hass(hass) + bulb = _mocked_ceiling() + bulb.power_level = 0 + bulb.color = [65535, 65535, 65535, 65535] + with ( + _patch_discovery(device=bulb), + _patch_config_flow_try_connect(device=bulb), + _patch_device(device=bulb), + ): + await async_setup_component(hass, lifx.DOMAIN, {lifx.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "light.my_bulb" + + # SKY effect test + bulb.power_level = 0 + await hass.services.async_call( + DOMAIN, + SERVICE_EFFECT_SKY, + { + ATTR_ENTITY_ID: entity_id, + ATTR_PALETTE: [], + ATTR_SKY_TYPE: "Clouds", + ATTR_CLOUD_SATURATION_MAX: 180, + ATTR_CLOUD_SATURATION_MIN: 50, + }, + blocking=True, + ) + + bulb.power_level = 65535 + bulb.effect = { + "effect": "SKY", + "palette": [], + "sky_type": 2, + "cloud_saturation_min": 50, + "cloud_saturation_max": 180, + } + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get(entity_id) + assert state.state == STATE_ON + + assert len(bulb.set_power.calls) == 1 + assert len(bulb.set_tile_effect.calls) == 1 + call_dict = bulb.set_tile_effect.calls[0][1] + call_dict.pop("callb") + assert call_dict == { + "effect": 5, + "speed": 50, + "palette": [], + "sky_type": 2, + "cloud_saturation_min": 50, + "cloud_saturation_max": 180, + } + bulb.get_tile_effect.reset_mock() + bulb.set_tile_effect.reset_mock() + bulb.set_power.reset_mock() + + bulb.power_level = 0 + await hass.services.async_call( + DOMAIN, + SERVICE_EFFECT_SKY, + { + ATTR_ENTITY_ID: entity_id, + ATTR_PALETTE: [ + (200, 100, 1, 3500), + (241, 100, 1, 3500), + (189, 100, 8, 3500), + (40, 100, 100, 3500), + (40, 50, 100, 3500), + (0, 0, 100, 6500), + ], + ATTR_SKY_TYPE: "Sunrise", + ATTR_CLOUD_SATURATION_MAX: 180, + ATTR_CLOUD_SATURATION_MIN: 50, + }, + blocking=True, + ) + + bulb.power_level = 
65535 + bulb.effect = { + "effect": "SKY", + "palette": [ + (200, 100, 1, 3500), + (241, 100, 1, 3500), + (189, 100, 8, 3500), + (40, 100, 100, 3500), + (40, 50, 100, 3500), + (0, 0, 100, 6500), + ], + "sky_type": 0, + "cloud_saturation_min": 50, + "cloud_saturation_max": 180, + } + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get(entity_id) + assert state.state == STATE_ON + + assert len(bulb.set_power.calls) == 1 + assert len(bulb.set_tile_effect.calls) == 1 + call_dict = bulb.set_tile_effect.calls[0][1] + call_dict.pop("callb") + assert call_dict == { + "effect": 5, + "speed": 50, + "palette": [ + (36408, 65535, 65535, 3500), + (43872, 65535, 65535, 3500), + (34406, 65535, 5243, 3500), + (7281, 65535, 65535, 3500), + (7281, 32768, 65535, 3500), + (0, 0, 65535, 6500), + ], + "sky_type": 0, + "cloud_saturation_min": 50, + "cloud_saturation_max": 180, } bulb.get_tile_effect.reset_mock() bulb.set_tile_effect.reset_mock() diff --git a/tests/components/light/test_recorder.py b/tests/components/light/test_recorder.py index 49c9a567856..f3f87ff6074 100644 --- a/tests/components/light/test_recorder.py +++ b/tests/components/light/test_recorder.py @@ -9,12 +9,23 @@ import pytest from homeassistant.components import light from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_COLOR_MODE, + ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, + ATTR_EFFECT, ATTR_EFFECT_LIST, + ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, ATTR_MAX_MIREDS, ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_MIN_MIREDS, + ATTR_RGB_COLOR, + ATTR_RGBW_COLOR, + ATTR_RGBWW_COLOR, ATTR_SUPPORTED_COLOR_MODES, + ATTR_XY_COLOR, + DOMAIN, ) from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states @@ -50,7 +61,7 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) await async_wait_recording_done(hass) states = await hass.async_add_executor_job( - get_significant_states, hass, now, None, hass.states.async_entity_ids() + get_significant_states, hass, now, None, hass.states.async_entity_ids(DOMAIN) ) assert len(states) >= 1 for entity_states in states.values(): @@ -62,3 +73,13 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) assert ATTR_FRIENDLY_NAME in state.attributes assert ATTR_MAX_COLOR_TEMP_KELVIN not in state.attributes assert ATTR_MIN_COLOR_TEMP_KELVIN not in state.attributes + assert ATTR_BRIGHTNESS not in state.attributes + assert ATTR_COLOR_MODE not in state.attributes + assert ATTR_COLOR_TEMP not in state.attributes + assert ATTR_COLOR_TEMP_KELVIN not in state.attributes + assert ATTR_EFFECT not in state.attributes + assert ATTR_HS_COLOR not in state.attributes + assert ATTR_RGB_COLOR not in state.attributes + assert ATTR_RGBW_COLOR not in state.attributes + assert ATTR_RGBWW_COLOR not in state.attributes + assert ATTR_XY_COLOR not in state.attributes diff --git a/tests/components/local_todo/test_todo.py b/tests/components/local_todo/test_todo.py index e54ee925437..253adebd757 100644 --- a/tests/components/local_todo/test_todo.py +++ b/tests/components/local_todo/test_todo.py @@ -7,7 +7,17 @@ from typing import Any import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_DUE_DATETIME, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + 
DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from .conftest import TEST_ENTITY @@ -76,17 +86,17 @@ EXPECTED_ADD_ITEM = { ("item_data", "expected_item_data"), [ ({}, EXPECTED_ADD_ITEM), - ({"due_date": "2023-11-17"}, {**EXPECTED_ADD_ITEM, "due": "2023-11-17"}), + ({ATTR_DUE_DATE: "2023-11-17"}, {**EXPECTED_ADD_ITEM, "due": "2023-11-17"}), ( - {"due_datetime": "2023-11-17T11:30:00+00:00"}, + {ATTR_DUE_DATETIME: "2023-11-17T11:30:00+00:00"}, {**EXPECTED_ADD_ITEM, "due": "2023-11-17T05:30:00-06:00"}, ), ( - {"description": "Additional detail"}, + {ATTR_DESCRIPTION: "Additional detail"}, {**EXPECTED_ADD_ITEM, "description": "Additional detail"}, ), - ({"description": ""}, {**EXPECTED_ADD_ITEM, "description": ""}), - ({"description": None}, EXPECTED_ADD_ITEM), + ({ATTR_DESCRIPTION: ""}, {**EXPECTED_ADD_ITEM, "description": ""}), + ({ATTR_DESCRIPTION: None}, EXPECTED_ADD_ITEM), ], ) async def test_add_item( @@ -105,9 +115,9 @@ async def test_add_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "replace batteries", **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "replace batteries", **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -127,12 +137,12 @@ async def test_add_item( ("item_data", "expected_item_data"), [ ({}, {}), - ({"due_date": "2023-11-17"}, {"due": "2023-11-17"}), + ({ATTR_DUE_DATE: "2023-11-17"}, {"due": "2023-11-17"}), ( {"due_datetime": "2023-11-17T11:30:00+00:00"}, {"due": "2023-11-17T05:30:00-06:00"}, ), - ({"description": "Additional detail"}, {"description": "Additional detail"}), + ({ATTR_DESCRIPTION: "Additional detail"}, {"description": "Additional detail"}), ], ) async def test_remove_item( @@ -145,9 +155,9 @@ async def test_remove_item( """Test removing a todo item.""" await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "replace batteries", **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "replace batteries", **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -165,9 +175,9 @@ async def test_remove_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": [items[0]["uid"]]}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: [items[0]["uid"]]}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -188,9 +198,9 @@ async def test_bulk_remove( for i in range(5): await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": f"soda #{i}"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: f"soda #{i}"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -204,9 +214,9 @@ async def test_bulk_remove( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": uids}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: uids}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -227,19 +237,23 @@ EXPECTED_UPDATE_ITEM = { @pytest.mark.parametrize( ("item_data", "expected_item_data", "expected_state"), [ - ({"status": "completed"}, {**EXPECTED_UPDATE_ITEM, "status": "completed"}, "0"), ( - {"due_date": "2023-11-17"}, + {ATTR_STATUS: "completed"}, + {**EXPECTED_UPDATE_ITEM, "status": "completed"}, + "0", + ), + ( + {ATTR_DUE_DATE: "2023-11-17"}, {**EXPECTED_UPDATE_ITEM, "due": "2023-11-17"}, "1", ), ( - {"due_datetime": "2023-11-17T11:30:00+00:00"}, + {ATTR_DUE_DATETIME: 
"2023-11-17T11:30:00+00:00"}, {**EXPECTED_UPDATE_ITEM, "due": "2023-11-17T05:30:00-06:00"}, "1", ), ( - {"description": "Additional detail"}, + {ATTR_DESCRIPTION: "Additional detail"}, {**EXPECTED_UPDATE_ITEM, "description": "Additional detail"}, "1", ), @@ -258,9 +272,9 @@ async def test_update_item( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -279,9 +293,9 @@ async def test_update_item( # Update item await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": item["uid"], **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: item["uid"], **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -303,7 +317,7 @@ async def test_update_item( ("item_data", "expected_item_data"), [ ( - {"status": "completed"}, + {ATTR_STATUS: "completed"}, { "summary": "soda", "status": "completed", @@ -312,7 +326,7 @@ async def test_update_item( }, ), ( - {"due_date": "2024-01-02"}, + {ATTR_DUE_DATE: "2024-01-02"}, { "summary": "soda", "status": "needs_action", @@ -321,7 +335,7 @@ async def test_update_item( }, ), ( - {"due_date": None}, + {ATTR_DUE_DATE: None}, { "summary": "soda", "status": "needs_action", @@ -329,7 +343,7 @@ async def test_update_item( }, ), ( - {"due_datetime": "2024-01-01 10:30:00"}, + {ATTR_DUE_DATETIME: "2024-01-01 10:30:00"}, { "summary": "soda", "status": "needs_action", @@ -338,7 +352,7 @@ async def test_update_item( }, ), ( - {"due_datetime": None}, + {ATTR_DUE_DATETIME: None}, { "summary": "soda", "status": "needs_action", @@ -346,7 +360,7 @@ async def test_update_item( }, ), ( - {"description": "updated description"}, + {ATTR_DESCRIPTION: "updated description"}, { "summary": "soda", "status": "needs_action", @@ -355,7 +369,7 @@ async def test_update_item( }, ), ( - {"description": None}, + {ATTR_DESCRIPTION: None}, {"summary": "soda", "status": "needs_action", "due": "2024-01-01"}, ), ], @@ -381,9 +395,13 @@ async def test_update_existing_field( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda", "description": "Additional detail", "due_date": "2024-01-01"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + { + ATTR_ITEM: "soda", + ATTR_DESCRIPTION: "Additional detail", + ATTR_DUE_DATE: "2024-01-01", + }, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -398,9 +416,9 @@ async def test_update_existing_field( # Perform update await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": item["uid"], **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: item["uid"], **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -424,9 +442,9 @@ async def test_rename( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -444,9 +462,9 @@ async def test_rename( # Rename item await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": item["uid"], "rename": "water"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: item["uid"], ATTR_RENAME: "water"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -501,9 +519,9 @@ async def test_move_item( for i in range(1, 
5): await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": f"item {i}"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: f"item {i}"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -559,9 +577,9 @@ async def test_move_item_previous_unknown( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "item 1"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "item 1"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) items = await ws_get_items() @@ -732,9 +750,9 @@ async def test_susbcribe( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -765,9 +783,9 @@ async def test_susbcribe( # Rename item await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": uid, "rename": "milk"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: uid, ATTR_RENAME: "milk"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) diff --git a/tests/components/lutron_caseta/test_device_trigger.py b/tests/components/lutron_caseta/test_device_trigger.py index 3e97be67da1..405c504dee1 100644 --- a/tests/components/lutron_caseta/test_device_trigger.py +++ b/tests/components/lutron_caseta/test_device_trigger.py @@ -125,7 +125,11 @@ async def _async_setup_lutron_with_picos(hass): async def test_get_triggers(hass: HomeAssistant) -> None: """Test we get the expected triggers from a lutron pico.""" config_entry_id = await _async_setup_lutron_with_picos(hass) - data: LutronCasetaData = hass.data[DOMAIN][config_entry_id] + # Fetching the config entry runtime_data is a legacy pattern + # and should not be copied for new integrations + data: LutronCasetaData = hass.config_entries.async_get_entry( + config_entry_id + ).runtime_data keypads = data.keypad_data.keypads device_id = keypads[list(keypads)[0]]["dr_device_id"] @@ -359,7 +363,11 @@ async def test_validate_trigger_config_unknown_device( """Test for no press with an unknown device.""" config_entry_id = await _async_setup_lutron_with_picos(hass) - data: LutronCasetaData = hass.data[DOMAIN][config_entry_id] + # Fetching the config entry runtime_data is a legacy pattern + # and should not be copied for new integrations + data: LutronCasetaData = hass.config_entries.async_get_entry( + config_entry_id + ).runtime_data keypads = data.keypad_data.keypads lutron_device_id = list(keypads)[0] keypad = keypads[lutron_device_id] @@ -406,7 +414,11 @@ async def test_validate_trigger_invalid_triggers( ) -> None: """Test for click_event with invalid triggers.""" config_entry_id = await _async_setup_lutron_with_picos(hass) - data: LutronCasetaData = hass.data[DOMAIN][config_entry_id] + # Fetching the config entry runtime_data is a legacy pattern + # and should not be copied for new integrations + data: LutronCasetaData = hass.config_entries.async_get_entry( + config_entry_id + ).runtime_data keypads = data.keypad_data.keypads lutron_device_id = list(keypads)[0] keypad = keypads[lutron_device_id] diff --git a/tests/components/lutron_caseta/test_logbook.py b/tests/components/lutron_caseta/test_logbook.py index b6e8840c85c..9a58838d65c 100644 --- a/tests/components/lutron_caseta/test_logbook.py +++ b/tests/components/lutron_caseta/test_logbook.py @@ -53,7 +53,11 @@ async def test_humanify_lutron_caseta_button_event(hass: HomeAssistant) -> None: await 
hass.async_block_till_done() - data: LutronCasetaData = hass.data[DOMAIN][config_entry.entry_id] + # Fetching the config entry runtime_data is a legacy pattern + # and should not be copied for new integrations + data: LutronCasetaData = hass.config_entries.async_get_entry( + config_entry.entry_id + ).runtime_data keypads = data.keypad_data.keypads keypad = keypads["9"] dr_device_id = keypad["dr_device_id"] diff --git a/tests/components/matrix/test_commands.py b/tests/components/matrix/test_commands.py index 8539252ad66..dabee74fdc3 100644 --- a/tests/components/matrix/test_commands.py +++ b/tests/components/matrix/test_commands.py @@ -1,11 +1,11 @@ """Test MatrixBot's ability to parse and respond to commands in matrix rooms.""" +from dataclasses import dataclass from functools import partial from itertools import chain from typing import Any from nio import MatrixRoom, RoomMessageText -from pydantic.dataclasses import dataclass import pytest from homeassistant.components.matrix import MatrixBot, RoomID diff --git a/tests/components/matter/test_adapter.py b/tests/components/matter/test_adapter.py index da2ef179c44..dfe23f0ad70 100644 --- a/tests/components/matter/test_adapter.py +++ b/tests/components/matter/test_adapter.py @@ -187,6 +187,60 @@ async def test_device_registry_single_node_composed_device( assert len(dev_reg.devices) == 1 +async def test_device_registry_single_node_with_connection( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + matter_client: MagicMock, +) -> None: + """Test that a device with mac address adds a connection to the HA device entry.""" + await setup_integration_with_node_fixture( + hass, + "thermostat", + matter_client, + ) + + assert device_registry.async_get_device(connections={("mac", "DC:54:75:5F:BA:AC")}) + + +async def test_device_registry_single_node_without_mac_address_has_no_mac_connection( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + matter_client: MagicMock, +) -> None: + """Test that a device without mac address doesn't have a `mac` connection in the HA device entry.""" + await setup_integration_with_node_fixture( + hass, + "temperature-sensor", + matter_client, + ) + + entry = device_registry.async_get_device( + identifiers={ + (DOMAIN, "deviceid_00000000000004D2-0000000000000001-MatterNodeDevice") + } + ) + + for connection_type, _ in entry.connections: + assert connection_type != dr.CONNECTION_NETWORK_MAC + + +async def test_device_registry_node_with_EUI64_address( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + matter_client: MagicMock, +) -> None: + """Test that a device with a mac address has a `zigbee` connection in the HA device entry.""" + await setup_integration_with_node_fixture( + hass, + "eve-energy-plug", + matter_client, + ) + + assert device_registry.async_get_device( + connections={("zigbee", "ca:6b:4a:23:f6:f8:bb:ee")} + ) + + async def test_multi_endpoint_name( hass: HomeAssistant, matter_client: MagicMock, diff --git a/tests/components/mealie/snapshots/test_diagnostics.ambr b/tests/components/mealie/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..e6c72c950cc --- /dev/null +++ b/tests/components/mealie/snapshots/test_diagnostics.ambr @@ -0,0 +1,505 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'about': dict({ + 'version': 'v1.10.2', + }), + 'mealplans': dict({ + 'breakfast': list([ + dict({ + 'description': None, + 'entry_type': 'breakfast', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", 
+ 'isoformat': '2024-01-23', + }), + 'mealplan_id': 229, + 'recipe': dict({ + 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and gives you a tender, mouth-watering chicken full of flavor! Served with roasted vegetables, this recipe is simple enough for any cook!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'JeQ2', + 'name': 'Roast Chicken', + 'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/', + 'recipe_id': '5b055066-d57d-4fd0-8dfd-a2c2f07b36f1', + 'recipe_yield': '6 servings', + 'slug': 'roast-chicken', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + ]), + 'dinner': list([ + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 230, + 'recipe': dict({ + 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'AiIo', + 'name': 'Zoete aardappel curry traybake', + 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', + 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', + 'recipe_yield': '2 servings', + 'slug': 'zoete-aardappel-curry-traybake', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 222, + 'recipe': dict({ + 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. 
Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'En9o', + 'name': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο (1)', + 'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno', + 'recipe_id': 'f79f7e9d-4b58-4930-a586-2b127f16ee34', + 'recipe_yield': '6 servings', + 'slug': 'eukole-makaronada-me-kephtedakia-ston-phourno-1', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 221, + 'recipe': dict({ + 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'Kn62', + 'name': 'Greek Turkey Meatballs with Lemon Orzo & Creamy Feta Yogurt Sauce', + 'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/', + 'recipe_id': '47595e4c-52bc-441d-b273-3edf4258806d', + 'recipe_yield': '4 servings', + 'slug': 'greek-turkey-meatballs-with-lemon-orzo-creamy-feta-yogurt-sauce', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 219, + 'recipe': dict({ + 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. It is really easy to make and I never have any leftovers. Cook time includes chill time.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'ibL6', + 'name': 'Pampered Chef Double Chocolate Mocha Trifle', + 'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963', + 'recipe_id': '92635fd0-f2dc-4e78-a6e4-ecd556ad361f', + 'recipe_yield': '12 servings', + 'slug': 'pampered-chef-double-chocolate-mocha-trifle', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 217, + 'recipe': dict({ + 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! 
These sliders are quick and easy plus they are make-ahead and reheat really well.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'beGq', + 'name': 'Cheeseburger Sliders (Easy, 30-min Recipe)', + 'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/', + 'recipe_id': '8bdd3656-5e7e-45d3-a3c4-557390846a22', + 'recipe_yield': '24 servings', + 'slug': 'cheeseburger-sliders-easy-30-min-recipe', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 212, + 'recipe': dict({ + 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '356X', + 'name': 'All-American Beef Stew Recipe', + 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', + 'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e', + 'recipe_yield': '6 servings', + 'slug': 'all-american-beef-stew-recipe', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 211, + 'recipe': dict({ + 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nOPT', + 'name': 'Einfacher Nudelauflauf mit Brokkoli', + 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', + 'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159', + 'recipe_yield': '4 servings', + 'slug': 'einfacher-nudelauflauf-mit-brokkoli', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 196, + 'recipe': dict({ + 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '5G1v', + 'name': 'Miso Udon Noodles with Spinach and Tofu', + 'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/', + 'recipe_id': '25b814f2-d9bf-4df0-b40d-d2f2457b4317', + 'recipe_yield': '2 servings', + 'slug': 'miso-udon-noodles-with-spinach-and-tofu', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 195, + 'recipe': dict({ + 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. 
On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'rrNL', + 'name': 'Mousse de saumon', + 'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon', + 'recipe_id': '55c88810-4cf1-4d86-ae50-63b15fd173fb', + 'recipe_yield': '12 servings', + 'slug': 'mousse-de-saumon', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': 'Dineren met de boys', + 'entry_type': 'dinner', + 'group_id': '3931df86-0679-4579-8c63-4bedc9ca9a85', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-21', + }), + 'mealplan_id': 1, + 'recipe': None, + 'title': 'Aquavite', + 'user_id': '6caa6e4d-521f-4ef4-9ed7-388bdd63f47d', + }), + ]), + 'lunch': list([ + dict({ + 'description': None, + 'entry_type': 'lunch', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 226, + 'recipe': dict({ + 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. Ingredientes, tiempo de...', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'INQz', + 'name': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', + 'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos', + 'recipe_id': 'e360a0cc-18b0-4a84-a91b-8aa59e2451c9', + 'recipe_yield': '2 servings', + 'slug': 'receta-de-pollo-al-curry-en-10-minutos-con-video-incluido', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'lunch', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 224, + 'recipe': dict({ + 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nj5M', + 'name': 'Boeuf bourguignon : la vraie recette (2)', + 'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx', + 'recipe_id': '9c7b8aee-c93c-4b1b-ab48-2625d444743a', + 'recipe_yield': '4 servings', + 'slug': 'boeuf-bourguignon-la-vraie-recette-2', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'lunch', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 216, + 'recipe': dict({ + 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '356X', + 'name': 'All-American Beef Stew Recipe', + 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', + 'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e', + 'recipe_yield': '6 servings', + 'slug': 'all-american-beef-stew-recipe', + 
'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + ]), + 'side': list([ + dict({ + 'description': None, + 'entry_type': 'side', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 220, + 'recipe': dict({ + 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nOPT', + 'name': 'Einfacher Nudelauflauf mit Brokkoli', + 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', + 'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159', + 'recipe_yield': '4 servings', + 'slug': 'einfacher-nudelauflauf-mit-brokkoli', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + ]), + }), + 'shoppinglist': dict({ + '27edbaab-2ec6-441f-8490-0283ea77585f': dict({ + 'items': list([ + dict({ + 'checked': False, + 'disable_amount': True, + 'display': '2 Apples', + 'food_id': None, + 'is_food': False, + 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': 'Apples', + 'position': 0, + 'quantity': 2.0, + 'unit_id': None, + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': '1 can acorn squash', + 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', + 'is_food': True, + 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 1, + 'quantity': 1.0, + 'unit_id': '7bf539d4-fc78-48bc-b48e-c35ccccec34a', + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': 'aubergine', + 'food_id': '96801494-4e26-4148-849a-8155deb76327', + 'is_food': True, + 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 2, + 'quantity': 0.0, + 'unit_id': None, + }), + ]), + 'shopping_list': dict({ + 'list_id': '27edbaab-2ec6-441f-8490-0283ea77585f', + 'name': 'Supermarket', + }), + }), + 'e9d78ff2-4b23-4b77-a3a8-464827100b46': dict({ + 'items': list([ + dict({ + 'checked': False, + 'disable_amount': True, + 'display': '2 Apples', + 'food_id': None, + 'is_food': False, + 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': 'Apples', + 'position': 0, + 'quantity': 2.0, + 'unit_id': None, + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': '1 can acorn squash', + 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', + 'is_food': True, + 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 1, + 'quantity': 1.0, + 'unit_id': '7bf539d4-fc78-48bc-b48e-c35ccccec34a', + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': 'aubergine', + 'food_id': '96801494-4e26-4148-849a-8155deb76327', + 'is_food': True, + 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 2, + 'quantity': 0.0, + 'unit_id': None, + }), + ]), + 'shopping_list': dict({ + 'list_id': 'e9d78ff2-4b23-4b77-a3a8-464827100b46', + 
'name': 'Freezer', + }), + }), + 'f8438635-8211-4be8-80d0-0aa42e37a5f2': dict({ + 'items': list([ + dict({ + 'checked': False, + 'disable_amount': True, + 'display': '2 Apples', + 'food_id': None, + 'is_food': False, + 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': 'Apples', + 'position': 0, + 'quantity': 2.0, + 'unit_id': None, + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': '1 can acorn squash', + 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', + 'is_food': True, + 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 1, + 'quantity': 1.0, + 'unit_id': '7bf539d4-fc78-48bc-b48e-c35ccccec34a', + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': 'aubergine', + 'food_id': '96801494-4e26-4148-849a-8155deb76327', + 'is_food': True, + 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 2, + 'quantity': 0.0, + 'unit_id': None, + }), + ]), + 'shopping_list': dict({ + 'list_id': 'f8438635-8211-4be8-80d0-0aa42e37a5f2', + 'name': 'Special groceries', + }), + }), + }), + }) +# --- diff --git a/tests/components/mealie/test_config_flow.py b/tests/components/mealie/test_config_flow.py index 777bb1e4ad1..8edc89c3213 100644 --- a/tests/components/mealie/test_config_flow.py +++ b/tests/components/mealie/test_config_flow.py @@ -2,15 +2,17 @@ from unittest.mock import AsyncMock -from aiomealie import MealieAuthenticationError, MealieConnectionError +from aiomealie import About, MealieAuthenticationError, MealieConnectionError import pytest from homeassistant.components.mealie.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_API_TOKEN, CONF_HOST +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER +from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . 
import setup_integration + from tests.common import MockConfigEntry @@ -36,6 +38,7 @@ async def test_full_flow( assert result["data"] == { CONF_HOST: "demo.mealie.io", CONF_API_TOKEN: "token", + CONF_VERIFY_SSL: True, } assert result["result"].unique_id == "bf1c62fe-4941-4332-9886-e54e88dbdba0" @@ -79,10 +82,43 @@ async def test_flow_errors( result["flow_id"], {CONF_HOST: "demo.mealie.io", CONF_API_TOKEN: "token"}, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY +@pytest.mark.parametrize( + ("version"), + [ + ("v1.0.0beta-5"), + ("v1.0.0-RC2"), + ("v0.1.0"), + ("something"), + ], +) +async def test_flow_version_error( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + version, +) -> None: + """Test flow version error.""" + mock_mealie_client.get_about.return_value = About(version=version) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "demo.mealie.io", CONF_API_TOKEN: "token"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "mealie_version"} + + async def test_duplicate( hass: HomeAssistant, mock_mealie_client: AsyncMock, @@ -106,3 +142,213 @@ async def test_duplicate( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_reauth_flow( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow.""" + await setup_integration(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_TOKEN: "token2"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert mock_config_entry.data[CONF_API_TOKEN] == "token2" + + +async def test_reauth_flow_wrong_account( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow with wrong account.""" + await setup_integration(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + mock_mealie_client.get_user_info.return_value.user_id = "wrong_user_id" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_TOKEN: "token2"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_account" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (MealieConnectionError, "cannot_connect"), + (MealieAuthenticationError, "invalid_auth"), + (Exception, "unknown"), + ], +) +async def test_reauth_flow_exceptions( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + 
error: str, +) -> None: + """Test reauth flow errors.""" + await setup_integration(hass, mock_config_entry) + mock_mealie_client.get_user_info.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_TOKEN: "token"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {"base": error} + + mock_mealie_client.get_user_info.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_TOKEN: "token"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow.""" + await setup_integration(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "http://test:9090", + CONF_API_TOKEN: "token2", + CONF_VERIFY_SSL: False, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert mock_config_entry.data[CONF_API_TOKEN] == "token2" + assert mock_config_entry.data[CONF_HOST] == "http://test:9090" + assert mock_config_entry.data[CONF_VERIFY_SSL] is False + + +async def test_reconfigure_flow_wrong_account( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow with wrong account.""" + await setup_integration(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + mock_mealie_client.get_user_info.return_value.user_id = "wrong_user_id" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "http://test:9090", CONF_API_TOKEN: "token2"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_account" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (MealieConnectionError, "cannot_connect"), + (MealieAuthenticationError, "invalid_auth"), + (Exception, "unknown"), + ], +) +async def test_reconfigure_flow_exceptions( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error: str, +) -> None: + """Test reconfigure flow errors.""" + await setup_integration(hass, mock_config_entry) + mock_mealie_client.get_user_info.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": 
mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "http://test:9090", CONF_API_TOKEN: "token"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + assert result["errors"] == {"base": error} + + mock_mealie_client.get_user_info.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "http://test:9090", CONF_API_TOKEN: "token"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" diff --git a/tests/components/mealie/test_diagnostics.py b/tests/components/mealie/test_diagnostics.py new file mode 100644 index 00000000000..88680da9784 --- /dev/null +++ b/tests/components/mealie/test_diagnostics.py @@ -0,0 +1,28 @@ +"""Test Mealie diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, mock_config_entry) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + == snapshot + ) diff --git a/tests/components/mealie/test_init.py b/tests/components/mealie/test_init.py index bec03ab3719..0050aa58bb8 100644 --- a/tests/components/mealie/test_init.py +++ b/tests/components/mealie/test_init.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from aiomealie import MealieAuthenticationError, MealieConnectionError +from aiomealie import About, MealieAuthenticationError, MealieConnectionError import pytest from syrupy import SnapshotAssertion @@ -32,6 +32,51 @@ async def test_device_info( assert device_entry == snapshot +@pytest.mark.parametrize( + ("exc", "state"), + [ + (MealieConnectionError, ConfigEntryState.SETUP_RETRY), + (MealieAuthenticationError, ConfigEntryState.SETUP_ERROR), + ], +) +async def test_setup_failure( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + exc: Exception, + state: ConfigEntryState, +) -> None: + """Test setup failure.""" + mock_mealie_client.get_about.side_effect = exc + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is state + + +@pytest.mark.parametrize( + ("version"), + [ + ("v1.0.0beta-5"), + ("v1.0.0-RC2"), + ("v0.1.0"), + ("something"), + ], +) +async def test_setup_too_old( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + version, +) -> None: + """Test setup of Mealie entry with too old version of Mealie.""" + mock_mealie_client.get_about.return_value = About(version=version) + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + async def test_load_unload_entry( hass: HomeAssistant, mock_mealie_client: AsyncMock, @@ -90,25 +135,3 @@ async def test_shoppingitems_initialization_failure( await 
setup_integration(hass, mock_config_entry) assert mock_config_entry.state is state - - -@pytest.mark.parametrize( - ("exc", "state"), - [ - (MealieConnectionError, ConfigEntryState.SETUP_ERROR), - (MealieAuthenticationError, ConfigEntryState.SETUP_ERROR), - ], -) -async def test_shoppinglists_initialization_failure( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, - exc: Exception, - state: ConfigEntryState, -) -> None: - """Test initialization failure.""" - mock_mealie_client.get_shopping_lists.side_effect = exc - - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is state diff --git a/tests/components/mealie/test_services.py b/tests/components/mealie/test_services.py index b6928f88f2c..c655d899416 100644 --- a/tests/components/mealie/test_services.py +++ b/tests/components/mealie/test_services.py @@ -65,8 +65,8 @@ async def test_service_mealplan( SERVICE_GET_MEALPLAN, { ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, - ATTR_START_DATE: date(2023, 10, 22), - ATTR_END_DATE: date(2023, 10, 25), + ATTR_START_DATE: "2023-10-22", + ATTR_END_DATE: "2023-10-25", }, blocking=True, return_response=True, @@ -82,7 +82,7 @@ async def test_service_mealplan( SERVICE_GET_MEALPLAN, { ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, - ATTR_START_DATE: date(2023, 10, 19), + ATTR_START_DATE: "2023-10-19", }, blocking=True, return_response=True, @@ -98,7 +98,7 @@ async def test_service_mealplan( SERVICE_GET_MEALPLAN, { ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, - ATTR_END_DATE: date(2023, 10, 22), + ATTR_END_DATE: "2023-10-22", }, blocking=True, return_response=True, @@ -115,8 +115,8 @@ async def test_service_mealplan( SERVICE_GET_MEALPLAN, { ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, - ATTR_START_DATE: date(2023, 10, 22), - ATTR_END_DATE: date(2023, 10, 19), + ATTR_START_DATE: "2023-10-22", + ATTR_END_DATE: "2023-10-19", }, blocking=True, return_response=True, diff --git a/tests/components/mealie/test_todo.py b/tests/components/mealie/test_todo.py index 2b65d8b468f..36bcaa05124 100644 --- a/tests/components/mealie/test_todo.py +++ b/tests/components/mealie/test_todo.py @@ -1,12 +1,22 @@ """Tests for the Mealie todo.""" +from datetime import timedelta from unittest.mock import AsyncMock, patch +from aiomealie import ShoppingListsResponse from aiomealie.exceptions import MealieError +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN +from homeassistant.components.mealie import DOMAIN +from homeassistant.components.todo import ( + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -14,7 +24,12 @@ from homeassistant.helpers import entity_registry as er from . 
import setup_integration -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_fixture, + snapshot_platform, +) async def test_entities( @@ -41,8 +56,8 @@ async def test_add_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Soda"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda"}, target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, blocking=True, ) @@ -63,8 +78,8 @@ async def test_add_todo_list_item_error( with pytest.raises(HomeAssistantError): await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Soda"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda"}, target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, blocking=True, ) @@ -80,8 +95,8 @@ async def test_update_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "aubergine", "rename": "Eggplant", "status": "completed"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "aubergine", ATTR_RENAME: "Eggplant", ATTR_STATUS: "completed"}, target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, blocking=True, ) @@ -102,8 +117,8 @@ async def test_update_todo_list_item_error( with pytest.raises(HomeAssistantError): await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "aubergine", "rename": "Eggplant", "status": "completed"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "aubergine", ATTR_RENAME: "Eggplant", ATTR_STATUS: "completed"}, target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, blocking=True, ) @@ -119,8 +134,8 @@ async def test_delete_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": "aubergine"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "aubergine"}, target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, blocking=True, ) @@ -142,8 +157,42 @@ async def test_delete_todo_list_item_error( with pytest.raises(HomeAssistantError): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": "aubergine"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "aubergine"}, target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, blocking=True, ) + + +async def test_runtime_management( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test for creating and deleting shopping lists.""" + response = ShoppingListsResponse.from_json( + load_fixture("get_shopping_lists.json", DOMAIN) + ).items + mock_mealie_client.get_shopping_lists.return_value = ShoppingListsResponse( + items=[response[0]] + ) + await setup_integration(hass, mock_config_entry) + assert hass.states.get("todo.mealie_supermarket") is not None + assert hass.states.get("todo.mealie_special_groceries") is None + + mock_mealie_client.get_shopping_lists.return_value = ShoppingListsResponse( + items=response[0:2] + ) + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass.states.get("todo.mealie_special_groceries") is not None + + mock_mealie_client.get_shopping_lists.return_value = ShoppingListsResponse( + items=[response[0]] + ) + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass.states.get("todo.mealie_special_groceries") is None diff --git a/tests/components/ourgroceries/test_todo.py b/tests/components/ourgroceries/test_todo.py index 672e2e14447..d364881b624 100644 --- a/tests/components/ourgroceries/test_todo.py +++ b/tests/components/ourgroceries/test_todo.py 
@@ -7,8 +7,14 @@ from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.ourgroceries.coordinator import SCAN_INTERVAL -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN -from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.components.todo import ( + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity @@ -69,9 +75,9 @@ async def test_add_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Soda"}, - target={"entity_id": "todo.test_list"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda"}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) @@ -108,9 +114,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "12345", "status": "completed"}, - target={"entity_id": "todo.test_list"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "12345", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) assert ourgroceries.toggle_item_crossed_off.called @@ -132,9 +138,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "12345", "status": "needs_action"}, - target={"entity_id": "todo.test_list"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "12345", ATTR_STATUS: "needs_action"}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) assert ourgroceries.toggle_item_crossed_off.called @@ -181,9 +187,9 @@ async def test_update_todo_item_summary( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "12345", "rename": "Milk"}, - target={"entity_id": "todo.test_list"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "12345", ATTR_RENAME: "Milk"}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) assert ourgroceries.change_item_on_list @@ -218,9 +224,9 @@ async def test_remove_todo_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["12345", "54321"]}, - target={"entity_id": "todo.test_list"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["12345", "54321"]}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) assert ourgroceries.remove_item_from_list.call_count == 2 diff --git a/tests/components/picnic/test_todo.py b/tests/components/picnic/test_todo.py index cdd30967058..2db5bc90159 100644 --- a/tests/components/picnic/test_todo.py +++ b/tests/components/picnic/test_todo.py @@ -5,7 +5,8 @@ from unittest.mock import MagicMock, Mock import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import DOMAIN +from homeassistant.components.todo import ATTR_ITEM, DOMAIN, TodoServices +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -91,9 +92,9 @@ async def test_create_todo_list_item( await hass.services.async_call( DOMAIN, - "add_item", - {"item": "Melk"}, - target={"entity_id": ENTITY_ID}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Melk"}, + target={ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, ) @@ -119,8 +120,8 @@ async def test_create_todo_list_item_not_found( with pytest.raises(ServiceValidationError): await hass.services.async_call( DOMAIN, - "add_item", - {"item": "Melk"}, - target={"entity_id": ENTITY_ID}, + TodoServices.ADD_ITEM, + 
{ATTR_ITEM: "Melk"}, + target={ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, ) diff --git a/tests/components/pyload/test_sensor.py b/tests/components/pyload/test_sensor.py index 8eccda07fa7..8c194a111ea 100644 --- a/tests/components/pyload/test_sensor.py +++ b/tests/components/pyload/test_sensor.py @@ -157,3 +157,25 @@ async def test_deprecated_yaml( assert issue_registry.async_get_issue( domain=HOMEASSISTANT_DOMAIN, issue_id=f"deprecated_yaml_{DOMAIN}" ) + + +async def test_pyload_pre_0_5_0( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, +) -> None: + """Test setup of the pyload sensor platform.""" + mock_pyloadapi.get_status.return_value = { + "pause": False, + "active": 1, + "queue": 6, + "total": 37, + "speed": 5405963.0, + "download": True, + "reconnect": False, + } + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index adfc451c742..5715e994d2e 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -6,6 +6,7 @@ import asyncio from collections.abc import Generator from datetime import datetime, timedelta import sqlite3 +import sys import threading from typing import Any, cast from unittest.mock import MagicMock, Mock, patch @@ -1698,7 +1699,9 @@ async def test_database_corruption_while_running( hass.states.async_set("test.lost", "on", {}) sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError() + sqlite3_exception.__cause__ = sqlite3.DatabaseError( + "database disk image is malformed" + ) await async_wait_recording_done(hass) with patch.object( @@ -1883,7 +1886,9 @@ async def test_database_lock_and_overflow( with ( patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), patch.object(recorder.core, "DB_LOCK_QUEUE_CHECK_TIMEOUT", 0.01), - patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 0), + patch.object( + recorder.core, "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", sys.maxsize + ), ): await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() @@ -1943,26 +1948,43 @@ async def test_database_lock_and_overflow_checks_available_memory( ) ) - await async_setup_recorder_instance(hass, config) - await hass.async_block_till_done() + with patch( + "homeassistant.components.recorder.core.QUEUE_CHECK_INTERVAL", + timedelta(seconds=1), + ): + await async_setup_recorder_instance(hass, config) + await hass.async_block_till_done() event_type = "EVENT_TEST" event_types = (event_type,) await async_wait_recording_done(hass) + min_available_memory = 256 * 1024**2 + + out_of_ram = False + + def _get_available_memory(*args: Any, **kwargs: Any) -> int: + nonlocal out_of_ram + return min_available_memory / 2 if out_of_ram else min_available_memory with ( patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), - patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 1), + patch.object( + recorder.core, + "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", + min_available_memory, + ), patch.object(recorder.core, "DB_LOCK_QUEUE_CHECK_TIMEOUT", 0.01), patch.object( recorder.core.Recorder, "_available_memory", - return_value=recorder.core.ESTIMATED_QUEUE_ITEM_SIZE * 4, + side_effect=_get_available_memory, ), ): instance = get_instance(hass) - await instance.lock_database() + assert 
await instance.lock_database() + db_events = await instance.async_add_executor_job(_get_db_events) + assert len(db_events) == 0 # Record up to the extended limit (which takes into account the available memory) for _ in range(2): event_data = {"test_attr": 5, "test_attr_10": "nice"} @@ -1979,6 +2001,7 @@ async def test_database_lock_and_overflow_checks_available_memory( assert "Database queue backlog reached more than" not in caplog.text + out_of_ram = True # Record beyond the extended limit (which takes into account the available memory) for _ in range(20): event_data = {"test_attr": 5, "test_attr_10": "nice"} @@ -2700,3 +2723,20 @@ async def test_all_tables_use_default_table_args(hass: HomeAssistant) -> None: """Test that all tables use the default table args.""" for table in db_schema.Base.metadata.tables.values(): assert table.kwargs.items() >= db_schema._DEFAULT_TABLE_ARGS.items() + + +async def test_empty_entity_id( + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the recorder can handle an empty entity_id.""" + await async_setup_recorder_instance( + hass, + { + "exclude": {"domains": "hidden_domain"}, + }, + ) + hass.bus.async_fire("hello", {"entity_id": ""}) + await async_wait_recording_done(hass) + assert "Invalid entity ID" not in caplog.text diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 25fe8993cfb..c10970e1f3b 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -174,7 +174,9 @@ async def test_database_migration_encounters_corruption( assert recorder.util.async_migration_in_progress(hass) is False sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError() + sqlite3_exception.__cause__ = sqlite3.DatabaseError( + "database disk image is malformed" + ) with ( patch( @@ -292,7 +294,9 @@ async def test_events_during_migration_queue_exhausted( new=create_engine_test, ), patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), - patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 0), + patch.object( + recorder.core, "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", sys.maxsize + ), ): await async_setup_recorder_instance( hass, {"commit_interval": 0}, wait_recorder=False diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index 5e6a413d64e..b206fefc392 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -204,7 +204,7 @@ async def test_purge_old_states_encouters_database_corruption( await async_wait_recording_done(hass) sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError() + sqlite3_exception.__cause__ = sqlite3.DatabaseError("not a database") with ( patch( diff --git a/tests/components/recorder/test_purge_v32_schema.py b/tests/components/recorder/test_purge_v32_schema.py index 51424c31ea2..9f3a124629a 100644 --- a/tests/components/recorder/test_purge_v32_schema.py +++ b/tests/components/recorder/test_purge_v32_schema.py @@ -178,7 +178,7 @@ async def test_purge_old_states_encouters_database_corruption( await async_wait_recording_done(hass) sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError() + sqlite3_exception.__cause__ = sqlite3.DatabaseError("not a database") with ( patch( diff --git 
a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index 4e809d02446..666629d4bcf 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -3,14 +3,14 @@ from datetime import timedelta import importlib import sys -from unittest.mock import patch +from unittest.mock import DEFAULT, patch import pytest from sqlalchemy import create_engine, inspect from sqlalchemy.orm import Session from homeassistant.components import recorder -from homeassistant.components.recorder import core, statistics +from homeassistant.components.recorder import core, migration, statistics from homeassistant.components.recorder.queries import select_event_type_ids from homeassistant.components.recorder.util import session_scope from homeassistant.core import EVENT_STATE_CHANGED, Event, EventOrigin, State @@ -104,21 +104,14 @@ async def test_migrate_times( patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - patch( - "homeassistant.components.recorder.Recorder._migrate_events_context_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_states_context_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_event_type_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_entity_ids", - ), - patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), - patch( - "homeassistant.components.recorder.Recorder._cleanup_legacy_states_event_ids" + patch.multiple( + "homeassistant.components.recorder.Recorder", + _migrate_events_context_ids=DEFAULT, + _migrate_states_context_ids=DEFAULT, + _migrate_event_type_ids=DEFAULT, + _migrate_entity_ids=DEFAULT, + _post_migrate_entity_ids=DEFAULT, + _cleanup_legacy_states_event_ids=DEFAULT, ), ): async with ( @@ -267,21 +260,14 @@ async def test_migrate_can_resume_entity_id_post_migration( patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - patch( - "homeassistant.components.recorder.Recorder._migrate_events_context_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_states_context_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_event_type_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_entity_ids", - ), - patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), - patch( - "homeassistant.components.recorder.Recorder._cleanup_legacy_states_event_ids" + patch.multiple( + "homeassistant.components.recorder.Recorder", + _migrate_events_context_ids=DEFAULT, + _migrate_states_context_ids=DEFAULT, + _migrate_event_type_ids=DEFAULT, + _migrate_entity_ids=DEFAULT, + _post_migrate_entity_ids=DEFAULT, + _cleanup_legacy_states_event_ids=DEFAULT, ), ): async with ( @@ -328,5 +314,143 @@ async def test_migrate_can_resume_entity_id_post_migration( states_indexes = await instance.async_add_executor_job(_get_states_index_names) states_index_names = {index["name"] for index in states_indexes} assert "ix_states_entity_id_last_updated_ts" not in states_index_names + assert "ix_states_event_id" not in states_index_names + + await hass.async_stop() + + +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def 
test_migrate_can_resume_ix_states_event_id_removed( + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, + recorder_db_url: str, +) -> None: + """Test we resume the entity id post migration after a restart. + + This case tests the migration still happens if + ix_states_event_id is removed from the states table. + """ + importlib.import_module(SCHEMA_MODULE) + old_db_schema = sys.modules[SCHEMA_MODULE] + now = dt_util.utcnow() + one_second_past = now - timedelta(seconds=1) + mock_state = State( + "sensor.test", + "old", + {"last_reset": now.isoformat()}, + last_changed=one_second_past, + last_updated=now, + ) + state_changed_event = Event( + EVENT_STATE_CHANGED, + { + "entity_id": "sensor.test", + "old_state": None, + "new_state": mock_state, + }, + EventOrigin.local, + time_fired_timestamp=now.timestamp(), + ) + custom_event = Event( + "custom_event", + {"entity_id": "sensor.custom"}, + EventOrigin.local, + time_fired_timestamp=now.timestamp(), + ) + number_of_migrations = 5 + + def _get_event_id_foreign_keys(): + assert instance.engine is not None + return next( + ( + fk # type: ignore[misc] + for fk in inspect(instance.engine).get_foreign_keys("states") + if fk["constrained_columns"] == ["event_id"] + ), + None, + ) + + def _get_states_index_names(): + with session_scope(hass=hass) as session: + return inspect(session.connection()).get_indexes("states") + + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object( + recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION + ), + patch.object(core, "StatesMeta", old_db_schema.StatesMeta), + patch.object(core, "EventTypes", old_db_schema.EventTypes), + patch.object(core, "EventData", old_db_schema.EventData), + patch.object(core, "States", old_db_schema.States), + patch.object(core, "Events", old_db_schema.Events), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch.multiple( + "homeassistant.components.recorder.Recorder", + _migrate_events_context_ids=DEFAULT, + _migrate_states_context_ids=DEFAULT, + _migrate_event_type_ids=DEFAULT, + _migrate_entity_ids=DEFAULT, + _post_migrate_entity_ids=DEFAULT, + _cleanup_legacy_states_event_ids=DEFAULT, + ), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + def _add_data(): + with session_scope(hass=hass) as session: + session.add(old_db_schema.Events.from_event(custom_event)) + session.add(old_db_schema.States.from_event(state_changed_event)) + + await instance.async_add_executor_job(_add_data) + await hass.async_block_till_done() + await instance.async_block_till_done() + + await instance.async_add_executor_job( + migration._drop_index, + instance.get_session, + "states", + "ix_states_event_id", + ) + + states_indexes = await instance.async_add_executor_job( + _get_states_index_names + ) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is True + assert ( + await instance.async_add_executor_job(_get_event_id_foreign_keys) + is not None + ) + + await hass.async_stop() + await hass.async_block_till_done() + + assert "ix_states_entity_id_last_updated_ts" in states_index_names + + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + + # We need to wait for all the migration tasks to complete + # before we can check 
the database. + for _ in range(number_of_migrations): + await instance.async_block_till_done() + await async_wait_recording_done(hass) + + states_indexes = await instance.async_add_executor_job(_get_states_index_names) + states_index_names = {index["name"] for index in states_indexes} + assert "ix_states_entity_id_last_updated_ts" not in states_index_names + assert "ix_states_event_id" not in states_index_names + assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None await hass.async_stop() diff --git a/tests/components/recorder/test_websocket_api.py b/tests/components/recorder/test_websocket_api.py index 508848b9cc7..7467ebe5c4c 100644 --- a/tests/components/recorder/test_websocket_api.py +++ b/tests/components/recorder/test_websocket_api.py @@ -3,6 +3,7 @@ import datetime from datetime import timedelta from statistics import fmean +import sys from unittest.mock import ANY, patch from freezegun import freeze_time @@ -2515,7 +2516,9 @@ async def test_recorder_info_migration_queue_exhausted( new=create_engine_test, ), patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), - patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 0), + patch.object( + recorder.core, "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", sys.maxsize + ), ): async with async_test_recorder(hass, wait_recorder=False): await hass.async_add_executor_job( diff --git a/tests/components/reolink/test_media_source.py b/tests/components/reolink/test_media_source.py index 0d86106e8e5..66ed32ca823 100644 --- a/tests/components/reolink/test_media_source.py +++ b/tests/components/reolink/test_media_source.py @@ -54,6 +54,7 @@ TEST_FILE_NAME = f"{TEST_YEAR}{TEST_MONTH}{TEST_DAY}{TEST_HOUR}{TEST_MINUTE}00" TEST_FILE_NAME_MP4 = f"{TEST_YEAR}{TEST_MONTH}{TEST_DAY}{TEST_HOUR}{TEST_MINUTE}00.mp4" TEST_STREAM = "main" TEST_CHANNEL = "0" +TEST_CAM_NAME = "Cam new name" TEST_MIME_TYPE = "application/x-mpegURL" TEST_MIME_TYPE_MP4 = "video/mp4" @@ -130,6 +131,7 @@ async def test_browsing( """Test browsing the Reolink three.""" entry_id = config_entry.entry_id reolink_connect.api_version.return_value = 1 + reolink_connect.model = "Reolink TrackMix PoE" with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): assert await hass.config_entries.async_setup(entry_id) is True @@ -137,7 +139,7 @@ async def test_browsing( entries = dr.async_entries_for_config_entry(device_registry, entry_id) assert len(entries) > 0 - device_registry.async_update_device(entries[0].id, name_by_user="Cam new name") + device_registry.async_update_device(entries[0].id, name_by_user=TEST_CAM_NAME) caplog.set_level(logging.DEBUG) @@ -149,6 +151,7 @@ async def test_browsing( assert browse.title == "Reolink" assert browse.identifier is None assert browse.children[0].identifier == browse_root_id + assert browse.children[0].title == f"{TEST_CAM_NAME} lens 0" # browse resolution select browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{browse_root_id}") diff --git a/tests/components/russound_rio/__init__.py b/tests/components/russound_rio/__init__.py new file mode 100644 index 00000000000..96171071907 --- /dev/null +++ b/tests/components/russound_rio/__init__.py @@ -0,0 +1 @@ +"""Tests for the Russound RIO integration.""" diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py new file mode 100644 index 00000000000..49cb719dfc2 --- /dev/null +++ b/tests/components/russound_rio/conftest.py @@ -0,0 +1,48 @@ +"""Test fixtures for Russound RIO integration.""" + 
+from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.russound_rio.const import DOMAIN +from homeassistant.core import HomeAssistant + +from .const import HARDWARE_MAC, MOCK_CONFIG, MODEL + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry(): + """Prevent setup.""" + with patch( + "homeassistant.components.russound_rio.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Mock a Russound RIO config entry.""" + entry = MockConfigEntry( + domain=DOMAIN, data=MOCK_CONFIG, unique_id=HARDWARE_MAC, title=MODEL + ) + entry.add_to_hass(hass) + return entry + + +@pytest.fixture +def mock_russound() -> Generator[AsyncMock]: + """Mock the Russound RIO client.""" + with ( + patch( + "homeassistant.components.russound_rio.Russound", autospec=True + ) as mock_client, + patch( + "homeassistant.components.russound_rio.config_flow.Russound", + return_value=mock_client, + ), + ): + mock_client.enumerate_controllers.return_value = [(1, HARDWARE_MAC, MODEL)] + yield mock_client diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py new file mode 100644 index 00000000000..92aed6494d9 --- /dev/null +++ b/tests/components/russound_rio/const.py @@ -0,0 +1,11 @@ +"""Constants for russound_rio tests.""" + +HOST = "127.0.0.1" +PORT = 9621 +MODEL = "MCA-C5" +HARDWARE_MAC = "00:11:22:33:44:55" + +MOCK_CONFIG = { + "host": HOST, + "port": PORT, +} diff --git a/tests/components/russound_rio/test_config_flow.py b/tests/components/russound_rio/test_config_flow.py new file mode 100644 index 00000000000..195e4af9b11 --- /dev/null +++ b/tests/components/russound_rio/test_config_flow.py @@ -0,0 +1,135 @@ +"""Test the Russound RIO config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.russound_rio.const import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import HARDWARE_MAC, MOCK_CONFIG, MODEL + + +async def test_form( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MODEL + assert result["data"] == MOCK_CONFIG + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_cannot_connect( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock +) -> None: + """Test we handle cannot connect error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + mock_russound.connect.side_effect = TimeoutError + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + # Recover with correct information + mock_russound.connect.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + 
MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MODEL + assert result["data"] == MOCK_CONFIG + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_no_primary_controller( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock +) -> None: + """Test we handle no primary controller error.""" + mock_russound.enumerate_controllers.return_value = [] + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + user_input = MOCK_CONFIG + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "no_primary_controller"} + + # Recover with correct information + mock_russound.enumerate_controllers.return_value = [(1, HARDWARE_MAC, MODEL)] + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MODEL + assert result["data"] == MOCK_CONFIG + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_import( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock +) -> None: + """Test we import a config entry.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MODEL + assert result["data"] == MOCK_CONFIG + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_import_cannot_connect( + hass: HomeAssistant, mock_russound: AsyncMock +) -> None: + """Test we handle import cannot connect error.""" + mock_russound.connect.side_effect = TimeoutError + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +async def test_import_no_primary_controller( + hass: HomeAssistant, mock_russound: AsyncMock +) -> None: + """Test import with no primary controller error.""" + mock_russound.enumerate_controllers.return_value = [] + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_primary_controller" diff --git a/tests/components/search/test_init.py b/tests/components/search/test_init.py index a817fbfc39e..9b2b959e0dd 100644 --- a/tests/components/search/test_init.py +++ b/tests/components/search/test_init.py @@ -534,12 +534,14 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.ENTITY: {wled_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.AUTOMATION, "automation.wled_device") == { ItemType.AREA: {living_room_area.id}, ItemType.CONFIG_ENTRY: {wled_config_entry.entry_id}, ItemType.DEVICE: {wled_device.id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.AUTOMATION, "automation.floor") == { ItemType.FLOOR: {first_floor.floor_id}, @@ -561,6 +563,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled_hue"}, + ItemType.INTEGRATION: {"hue", "wled"}, } 
assert search(ItemType.AUTOMATION, "automation.scene") == { ItemType.AREA: {bedroom_area.id, kitchen_area.id, living_room_area.id}, @@ -574,6 +577,7 @@ async def test_search( scene_wled_hue_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } assert search(ItemType.AUTOMATION, "automation.script") == { @@ -589,6 +593,7 @@ async def test_search( script_scene_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {script_scene_entity.entity_id}, } @@ -611,6 +616,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, + ItemType.INTEGRATION: {"hue"}, ItemType.SCENE: {"scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.device", "script.hue"}, } @@ -624,6 +630,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, + ItemType.INTEGRATION: {"wled"}, ItemType.SCENE: {"scene.scene_wled_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.wled"}, } @@ -639,6 +646,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, + ItemType.INTEGRATION: {"wled"}, ItemType.LABEL: {label_christmas.label_id}, ItemType.SCENE: {"scene.scene_wled_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.wled"}, @@ -652,6 +660,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, + ItemType.INTEGRATION: {"hue"}, ItemType.SCENE: {"scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.device", "script.hue"}, } @@ -664,6 +673,7 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, + ItemType.INTEGRATION: {"wled"}, ItemType.SCENE: {"scene.scene_wled_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.wled"}, } @@ -673,6 +683,7 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.FLOOR: {second_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, + ItemType.INTEGRATION: {"wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } assert search(ItemType.ENTITY, hue_segment_1_entity.entity_id) == { @@ -681,6 +692,7 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, + ItemType.INTEGRATION: {"hue"}, ItemType.LABEL: {label_energy.label_id}, ItemType.SCENE: {"scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.hue"}, @@ -691,6 +703,7 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, + ItemType.INTEGRATION: {"hue"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } assert not search(ItemType.ENTITY, "automation.wled") @@ -722,6 +735,7 @@ async def test_search( } assert search(ItemType.ENTITY, "light.wled_config_entry_source") == { ItemType.CONFIG_ENTRY: {wled_config_entry.entry_id}, + ItemType.INTEGRATION: {"wled"}, } assert not search(ItemType.FLOOR, "unknown") @@ -780,6 +794,7 @@ async def test_search( 
wled_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.GROUP, "group.hue") == { ItemType.AREA: {kitchen_area.id}, @@ -790,6 +805,7 @@ async def test_search( hue_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.GROUP, "group.wled_hue") == { ItemType.AREA: {bedroom_area.id, living_room_area.id, kitchen_area.id}, @@ -803,6 +819,7 @@ async def test_search( hue_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCRIPT: {"script.group"}, } @@ -841,6 +858,7 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.ENTITY: {wled_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.SCENE, "scene.scene_hue_seg_1") == { ItemType.AREA: {kitchen_area.id}, @@ -848,6 +866,7 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.ENTITY: {hue_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.SCENE, scene_wled_hue_entity.entity_id) == { ItemType.AREA: {bedroom_area.id, living_room_area.id, kitchen_area.id}, @@ -861,6 +880,7 @@ async def test_search( hue_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.LABEL: {label_other.label_id}, ItemType.SCRIPT: {script_scene_entity.entity_id}, } @@ -880,6 +900,7 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.ENTITY: {wled_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.SCRIPT, "script.hue") == { ItemType.AREA: {kitchen_area.id}, @@ -887,6 +908,7 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.ENTITY: {hue_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.SCRIPT, "script.script_with_templated_services") == {} assert search(ItemType.SCRIPT, "script.device") == { @@ -894,6 +916,7 @@ async def test_search( ItemType.CONFIG_ENTRY: {hue_config_entry.entry_id}, ItemType.DEVICE: {hue_device.id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.SCRIPT, "script.floor") == { ItemType.FLOOR: {first_floor.floor_id}, @@ -915,6 +938,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled_hue"}, + ItemType.INTEGRATION: {"hue", "wled"}, } assert search(ItemType.SCRIPT, script_scene_entity.entity_id) == { ItemType.AREA: {bedroom_area.id, kitchen_area.id, living_room_area.id}, @@ -928,6 +952,7 @@ async def test_search( scene_wled_hue_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.LABEL: {label_other.label_id}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } @@ -944,6 +969,7 @@ async def test_search( script_scene_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {script_scene_entity.entity_id}, } @@ -981,6 +1007,7 @@ async def test_search( ), ItemType.CONFIG_ENTRY: [hue_config_entry.entry_id], ItemType.FLOOR: 
[first_floor.floor_id], + ItemType.INTEGRATION: ["hue"], ItemType.SCENE: unordered( ["scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id] ), diff --git a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index 034360c6cd2..2504ea80d84 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -942,7 +942,21 @@ async def test_custom_unit_change( "1000000", "1093613", SensorDeviceClass.DISTANCE, - ) + ), + # Volume Storage (subclass of Volume) + ( + US_CUSTOMARY_SYSTEM, + UnitOfVolume.LITERS, + UnitOfVolume.GALLONS, + UnitOfVolume.GALLONS, + UnitOfVolume.FLUID_OUNCES, + 1000, + "1000", + "264", + "264", + "33814", + SensorDeviceClass.VOLUME_STORAGE, + ), ], ) async def test_unit_conversion_priority( diff --git a/tests/components/shelly/__init__.py b/tests/components/shelly/__init__.py index 4631a17969e..7de45eeee98 100644 --- a/tests/components/shelly/__init__.py +++ b/tests/components/shelly/__init__.py @@ -23,6 +23,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, + DeviceEntry, DeviceRegistry, format_mac, ) @@ -111,6 +112,7 @@ def register_entity( unique_id: str, config_entry: ConfigEntry | None = None, capabilities: Mapping[str, Any] | None = None, + device_id: str | None = None, ) -> str: """Register enabled entity, return entity_id.""" entity_registry = er.async_get(hass) @@ -122,6 +124,7 @@ def register_entity( disabled_by=None, config_entry=config_entry, capabilities=capabilities, + device_id=device_id, ) return f"{domain}.{object_id}" @@ -145,9 +148,11 @@ def get_entity_state(hass: HomeAssistant, entity_id: str) -> str: return entity.state -def register_device(device_registry: DeviceRegistry, config_entry: ConfigEntry) -> None: +def register_device( + device_registry: DeviceRegistry, config_entry: ConfigEntry +) -> DeviceEntry: """Register Shelly device.""" - device_registry.async_get_or_create( + return device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(CONNECTION_NETWORK_MAC, format_mac(MOCK_MAC))}, ) diff --git a/tests/components/shelly/test_binary_sensor.py b/tests/components/shelly/test_binary_sensor.py index 3bfbf350f7e..b90d89b8e48 100644 --- a/tests/components/shelly/test_binary_sensor.py +++ b/tests/components/shelly/test_binary_sensor.py @@ -1,5 +1,6 @@ """Tests for Shelly binary sensor platform.""" +from copy import deepcopy from unittest.mock import Mock from aioshelly.const import MODEL_MOTION @@ -10,6 +11,7 @@ from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAI from homeassistant.components.shelly.const import UPDATE_PERIOD_MULTIPLIER from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry @@ -353,3 +355,104 @@ async def test_rpc_restored_sleeping_binary_sensor_no_last_state( await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_OFF + + +@pytest.mark.parametrize( + ("name", "entity_id"), + [ + ("Virtual binary sensor", "binary_sensor.test_name_virtual_binary_sensor"), + (None, "binary_sensor.test_name_boolean_203"), + ], +) +async def test_rpc_device_virtual_binary_sensor( + hass: HomeAssistant, + entity_registry: 
er.EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, +) -> None: + """Test a virtual binary sensor for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:203"] = { + "name": name, + "meta": {"ui": {"view": "label"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["boolean:203"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_ON + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-boolean:203-boolean" + + monkeypatch.setitem(mock_rpc_device.status["boolean:203"], "value", False) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == STATE_OFF + + +async def test_rpc_remove_virtual_binary_sensor_when_mode_toggle( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual binary sensor will be removed if the mode has been changed to a toggle.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:200"] = {"name": None, "meta": {"ui": {"view": "toggle"}}} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["boolean:200"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + BINARY_SENSOR_DOMAIN, + "test_name_boolean_200", + "boolean:200-boolean", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_virtual_binary_sensor_when_orphaned( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual binary sensor will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + BINARY_SENSOR_DOMAIN, + "test_name_boolean_200", + "boolean:200-boolean", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_number.py b/tests/components/shelly/test_number.py index ff453b3251c..73f432094b9 100644 --- a/tests/components/shelly/test_number.py +++ b/tests/components/shelly/test_number.py @@ -1,18 +1,24 @@ """Tests for Shelly number platform.""" +from copy import deepcopy from unittest.mock import AsyncMock, Mock from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError import pytest from homeassistant.components.number import ( + ATTR_MAX, + ATTR_MIN, + ATTR_MODE, + ATTR_STEP, ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, + NumberMode, ) from homeassistant.components.shelly.const import DOMAIN from homeassistant.config_entries import 
SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.const import ATTR_ENTITY_ID, ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceRegistry @@ -240,3 +246,145 @@ async def test_block_set_value_auth_error( assert "context" in flow assert flow["context"].get("source") == SOURCE_REAUTH assert flow["context"].get("entry_id") == entry.entry_id + + +@pytest.mark.parametrize( + ("name", "entity_id", "original_unit", "expected_unit", "view", "mode"), + [ + ( + "Virtual number", + "number.test_name_virtual_number", + "%", + "%", + "field", + NumberMode.BOX, + ), + (None, "number.test_name_number_203", "", None, "field", NumberMode.BOX), + ( + "Virtual slider", + "number.test_name_virtual_slider", + "Hz", + "Hz", + "slider", + NumberMode.SLIDER, + ), + ], +) +async def test_rpc_device_virtual_number( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, + original_unit: str, + expected_unit: str | None, + view: str, + mode: NumberMode, +) -> None: + """Test a virtual number for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["number:203"] = { + "name": name, + "min": 0, + "max": 100, + "meta": {"ui": {"step": 0.1, "unit": original_unit, "view": view}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["number:203"] = {"value": 12.3} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == "12.3" + assert state.attributes.get(ATTR_MIN) == 0 + assert state.attributes.get(ATTR_MAX) == 100 + assert state.attributes.get(ATTR_STEP) == 0.1 + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit + assert state.attributes.get(ATTR_MODE) is mode + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-number:203-number" + + monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 78.9) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "78.9" + + monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 56.7) + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 56.7}, + blocking=True, + ) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "56.7" + + +async def test_rpc_remove_virtual_number_when_mode_label( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual number will be removed if the mode has been changed to a label.""" + config = deepcopy(mock_rpc_device.config) + config["number:200"] = { + "name": None, + "min": -1000, + "max": 1000, + "meta": {"ui": {"step": 1, "unit": "", "view": "label"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["number:200"] = {"value": 123} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + 
NUMBER_DOMAIN, + "test_name_number_200", + "number:200-number", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_virtual_number_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual number will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + NUMBER_DOMAIN, + "test_name_number_200", + "number:200-number", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_sensor.py b/tests/components/shelly/test_sensor.py index c62a1f6f6ca..00b9d548dfd 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -854,3 +854,215 @@ async def test_rpc_disabled_xfreq( entry = entity_registry.async_get(entity_id) assert not entry + + +@pytest.mark.parametrize( + ("name", "entity_id"), + [ + ("Virtual sensor", "sensor.test_name_virtual_sensor"), + (None, "sensor.test_name_text_203"), + ], +) +async def test_rpc_device_virtual_text_sensor( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, +) -> None: + """Test a virtual text sensor for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["text:203"] = { + "name": name, + "meta": {"ui": {"view": "label"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["text:203"] = {"value": "lorem ipsum"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == "lorem ipsum" + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-text:203-text" + + monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "dolor sit amet") + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "dolor sit amet" + + +async def test_rpc_remove_text_virtual_sensor_when_mode_field( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual text sensor will be removed if the mode has been changed to a field.""" + config = deepcopy(mock_rpc_device.config) + config["text:200"] = {"name": None, "meta": {"ui": {"view": "field"}}} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["text:200"] = {"value": "lorem ipsum"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_text_200", + "text:200-text", + config_entry, + device_id=device_entry.id, + ) + + await 
hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_text_virtual_sensor_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual text sensor will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_text_200", + "text:200-text", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +@pytest.mark.parametrize( + ("name", "entity_id", "original_unit", "expected_unit"), + [ + ("Virtual number sensor", "sensor.test_name_virtual_number_sensor", "W", "W"), + (None, "sensor.test_name_number_203", "", None), + ], +) +async def test_rpc_device_virtual_number_sensor( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, + original_unit: str, + expected_unit: str | None, +) -> None: + """Test a virtual number sensor for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["number:203"] = { + "name": name, + "min": 0, + "max": 100, + "meta": {"ui": {"step": 0.1, "unit": original_unit, "view": "label"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["number:203"] = {"value": 34.5} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == "34.5" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-number:203-number" + + monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 56.7) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "56.7" + + +async def test_rpc_remove_number_virtual_sensor_when_mode_field( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual number sensor will be removed if the mode has been changed to a field.""" + config = deepcopy(mock_rpc_device.config) + config["number:200"] = { + "name": None, + "min": 0, + "max": 100, + "meta": {"ui": {"step": 1, "unit": "", "view": "field"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["number:200"] = {"value": 67.8} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_number_200", + "number:200-number", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def 
test_rpc_remove_number_virtual_sensor_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual number sensor will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_number_200", + "number:200-number", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_switch.py b/tests/components/shelly/test_switch.py index de87d11d255..124562be8d5 100644 --- a/tests/components/shelly/test_switch.py +++ b/tests/components/shelly/test_switch.py @@ -25,6 +25,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry @@ -430,3 +431,142 @@ async def test_wall_display_relay_mode( entry = entity_registry.async_get(switch_entity_id) assert entry assert entry.unique_id == "123456789ABC-switch:0" + + +@pytest.mark.parametrize( + ("name", "entity_id"), + [ + ("Virtual switch", "switch.test_name_virtual_switch"), + (None, "switch.test_name_boolean_200"), + ], +) +async def test_rpc_device_virtual_switch( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, +) -> None: + """Test a virtual switch for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:200"] = { + "name": name, + "meta": {"ui": {"view": "toggle"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["boolean:200"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_ON + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-boolean:200-boolean" + + monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", False) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == STATE_OFF + + monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", True) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == STATE_ON + + +async def test_rpc_device_virtual_binary_sensor( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test that a switch entity has not been created for a virtual binary sensor.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:200"] = {"name": None, "meta": {"ui": {"view": "label"}}} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + 
status["boolean:200"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + entity_id = "switch.test_name_boolean_200" + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert not state + + +async def test_rpc_remove_virtual_switch_when_mode_label( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual switch will be removed if the mode has been changed to a label.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:200"] = {"name": None, "meta": {"ui": {"view": "label"}}} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["boolean:200"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SWITCH_DOMAIN, + "test_name_boolean_200", + "boolean:200-boolean", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_virtual_switch_when_orphaned( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual switch will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SWITCH_DOMAIN, + "test_name_boolean_200", + "boolean:200-boolean", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_text.py b/tests/components/shelly/test_text.py new file mode 100644 index 00000000000..19acb856f35 --- /dev/null +++ b/tests/components/shelly/test_text.py @@ -0,0 +1,129 @@ +"""Tests for Shelly text platform.""" + +from copy import deepcopy +from unittest.mock import Mock + +import pytest + +from homeassistant.components.text import ( + ATTR_VALUE, + DOMAIN as TEXT_PLATFORM, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceRegistry +from homeassistant.helpers.entity_registry import EntityRegistry + +from . 
import init_integration, register_device, register_entity + + +@pytest.mark.parametrize( + ("name", "entity_id"), + [ + ("Virtual text", "text.test_name_virtual_text"), + (None, "text.test_name_text_203"), + ], +) +async def test_rpc_device_virtual_text( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, +) -> None: + """Test a virtual text for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["text:203"] = { + "name": name, + "meta": {"ui": {"view": "field"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["text:203"] = {"value": "lorem ipsum"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == "lorem ipsum" + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-text:203-text" + + monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "dolor sit amet") + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "dolor sit amet" + + monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "sed do eiusmod") + await hass.services.async_call( + TEXT_PLATFORM, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: "sed do eiusmod"}, + blocking=True, + ) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "sed do eiusmod" + + +async def test_rpc_remove_virtual_text_when_mode_label( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual text will be removed if the mode has been changed to a label.""" + config = deepcopy(mock_rpc_device.config) + config["text:200"] = {"name": None, "meta": {"ui": {"view": "label"}}} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["text:200"] = {"value": "lorem ipsum"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + TEXT_PLATFORM, + "test_name_text_200", + "text:200-text", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_virtual_text_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual text will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + TEXT_PLATFORM, + "test_name_text_200", + "text:200-text", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_utils.py b/tests/components/shelly/test_utils.py index 7c4ea8accae..5891f250fae 100644 --- 
a/tests/components/shelly/test_utils.py +++ b/tests/components/shelly/test_utils.py @@ -23,6 +23,7 @@ from homeassistant.components.shelly.utils import ( get_block_device_sleep_period, get_block_input_triggers, get_device_uptime, + get_host, get_number_of_channels, get_release_url, get_rpc_channel_name, @@ -274,3 +275,19 @@ def test_get_release_url( result = get_release_url(gen, model, beta) assert result is expected + + +@pytest.mark.parametrize( + ("host", "expected"), + [ + ("shelly_device.local", "shelly_device.local"), + ("192.168.178.12", "192.168.178.12"), + ( + "2001:0db8:85a3:0000:0000:8a2e:0370:7334", + "[2001:0db8:85a3:0000:0000:8a2e:0370:7334]", + ), + ], +) +def test_get_host(host: str, expected: str) -> None: + """Test get_host function.""" + assert get_host(host) == expected diff --git a/tests/components/shopping_list/test_todo.py b/tests/components/shopping_list/test_todo.py index 173544d0be2..f10479adf6c 100644 --- a/tests/components/shopping_list/test_todo.py +++ b/tests/components/shopping_list/test_todo.py @@ -5,7 +5,14 @@ from typing import Any import pytest -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN +from homeassistant.components.todo import ( + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -98,11 +105,11 @@ async def test_add_item( """Test adding shopping_list item and listing it.""" await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -125,9 +132,9 @@ async def test_remove_item( """Test removing a todo item.""" await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) items = await ws_get_items() @@ -142,11 +149,11 @@ async def test_remove_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", + TodoServices.REMOVE_ITEM, { - "item": [items[0]["uid"]], + ATTR_ITEM: [items[0]["uid"]], }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -168,11 +175,11 @@ async def test_bulk_remove( for _i in range(5): await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -186,11 +193,11 @@ async def test_bulk_remove( await hass.services.async_call( TODO_DOMAIN, - "remove_item", + TodoServices.REMOVE_ITEM, { - "item": uids, + ATTR_ITEM: uids, }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -212,11 +219,11 @@ async def test_update_item( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -234,12 +241,12 @@ async def test_update_item( # Mark item completed await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "soda", - "status": "completed", + ATTR_ITEM: "soda", + ATTR_STATUS: "completed", }, - target={"entity_id": TEST_ENTITY}, + 
target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -265,11 +272,11 @@ async def test_partial_update_item( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -287,12 +294,12 @@ async def test_partial_update_item( # Mark item completed without changing the summary await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": item["uid"], - "status": "completed", + ATTR_ITEM: item["uid"], + ATTR_STATUS: "completed", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -310,12 +317,12 @@ async def test_partial_update_item( # Change the summary without changing the status await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": item["uid"], - "rename": "other summary", + ATTR_ITEM: item["uid"], + ATTR_RENAME: "other summary", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -341,12 +348,12 @@ async def test_update_invalid_item( with pytest.raises(ServiceValidationError, match="Unable to find"): await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "invalid-uid", - "rename": "Example task", + ATTR_ITEM: "invalid-uid", + ATTR_RENAME: "Example task", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -391,11 +398,11 @@ async def test_move_item( for i in range(1, 5): await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": f"item {i}", + ATTR_ITEM: f"item {i}", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -429,9 +436,9 @@ async def test_move_invalid_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -456,11 +463,11 @@ async def test_subscribe_item( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -491,12 +498,12 @@ async def test_subscribe_item( # Rename item item completed await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "soda", - "rename": "milk", + ATTR_ITEM: "soda", + ATTR_RENAME: "milk", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) diff --git a/tests/components/template/snapshots/test_select.ambr b/tests/components/template/snapshots/test_select.ambr new file mode 100644 index 00000000000..d4cabb2900f --- /dev/null +++ b/tests/components/template/snapshots/test_select.ambr @@ -0,0 +1,19 @@ +# serializer version: 1 +# name: test_setup_config_entry + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'My template', + 'options': Wrapper([ + 'off', + 'on', + 'auto', + ]), + }), + 'context': , + 'entity_id': 'select.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- \ No newline at end of file diff --git a/tests/components/template/snapshots/test_switch.ambr 
b/tests/components/template/snapshots/test_switch.ambr new file mode 100644 index 00000000000..c240a9436a0 --- /dev/null +++ b/tests/components/template/snapshots/test_switch.ambr @@ -0,0 +1,14 @@ +# serializer version: 1 +# name: test_setup_config_entry + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'My template', + }), + 'context': , + 'entity_id': 'switch.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index 10f7d45637f..ff5db52d667 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -81,8 +81,39 @@ from tests.typing import WebSocketGenerator }, {}, ), + ( + "image", + {"url": "{{ states('sensor.one') }}"}, + "2024-07-09T00:00:00+00:00", + {"one": "http://www.test.com", "two": ""}, + {}, + {"verify_ssl": True}, + {"verify_ssl": True}, + {}, + ), + ( + "select", + {"state": "{{ states('select.one') }}"}, + "on", + {"one": "on", "two": "off"}, + {}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {}, + ), + ( + "switch", + {"value_template": "{{ states('switch.one') }}"}, + "on", + {"one": "on", "two": "off"}, + {}, + {}, + {}, + {}, + ), ], ) +@pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") async def test_config_flow( hass: HomeAssistant, template_type, @@ -175,12 +206,32 @@ async def test_config_flow( {}, {}, ), + ( + "switch", + {"value_template": "{{ false }}"}, + {}, + {}, + ), ( "button", {}, {}, {}, ), + ( + "image", + { + "url": "{{ states('sensor.one') }}", + }, + {"verify_ssl": True}, + {"verify_ssl": True}, + ), + ( + "select", + {"state": "{{ states('select.one') }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + ), ], ) async def test_config_flow_device( @@ -276,6 +327,7 @@ def get_suggested(schema, key): "input_states", "extra_options", "options_options", + "key_template", ), [ ( @@ -290,6 +342,7 @@ def get_suggested(schema, key): {"one": "on", "two": "off"}, {}, {}, + "state", ), ( "sensor", @@ -303,6 +356,7 @@ def get_suggested(schema, key): {"one": "30.0", "two": "20.0"}, {}, {}, + "state", ), ( "button", @@ -329,9 +383,48 @@ def get_suggested(schema, key): } ], }, + "state", + ), + ( + "image", + { + "url": "{{ states('sensor.one') }}", + }, + { + "url": "{{ states('sensor.two') }}", + }, + ["2024-07-09T00:00:00+00:00", "2024-07-09T00:00:00+00:00"], + {"one": "http://www.test.com", "two": "http://www.test2.com"}, + {"verify_ssl": True}, + { + "url": "{{ states('sensor.two') }}", + "verify_ssl": True, + }, + "url", + ), + ( + "select", + {"state": "{{ states('select.one') }}"}, + {"state": "{{ states('select.two') }}"}, + ["on", "off"], + {"one": "on", "two": "off"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + "state", + ), + ( + "switch", + {"value_template": "{{ states('switch.one') }}"}, + {"value_template": "{{ states('switch.two') }}"}, + ["on", "off"], + {"one": "on", "two": "off"}, + {}, + {}, + "value_template", ), ], ) +@pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") async def test_options( hass: HomeAssistant, template_type, @@ -341,6 +434,7 @@ async def test_options( input_states, extra_options, options_options, + key_template, ) -> None: """Test reconfiguring.""" input_entities = ["one", "two"] @@ -375,13 +469,16 @@ async def test_options( assert result["type"] is 
FlowResultType.FORM assert result["step_id"] == template_type assert get_suggested( - result["data_schema"].schema, "state" - ) == old_state_template.get("state") + result["data_schema"].schema, key_template + ) == old_state_template.get(key_template) assert "name" not in result["data_schema"].schema result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={**new_state_template, **options_options}, + user_input={ + **new_state_template, + **options_options, + }, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { @@ -419,7 +516,7 @@ async def test_options( assert result["step_id"] == template_type assert get_suggested(result["data_schema"].schema, "name") is None - assert get_suggested(result["data_schema"].schema, "state") is None + assert get_suggested(result["data_schema"].schema, key_template) is None @pytest.mark.parametrize( @@ -1050,6 +1147,27 @@ async def test_option_flow_sensor_preview_config_entry_removed( {}, {}, ), + ( + "image", + { + "url": "{{ states('sensor.one') }}", + "verify_ssl": True, + }, + {}, + {}, + ), + ( + "select", + {"state": "{{ states('select.one') }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + ), + ( + "switch", + {"value_template": "{{ false }}"}, + {}, + {}, + ), ], ) async def test_options_flow_change_device( diff --git a/tests/components/template/test_image.py b/tests/components/template/test_image.py index bda9e2530ca..d4e98d7a3ca 100644 --- a/tests/components/template/test_image.py +++ b/tests/components/template/test_image.py @@ -8,6 +8,7 @@ import httpx from PIL import Image import pytest import respx +from syrupy.assertion import SnapshotAssertion from homeassistant import setup from homeassistant.components.input_text import ( @@ -15,12 +16,13 @@ from homeassistant.components.input_text import ( DOMAIN as INPUT_TEXT_DOMAIN, SERVICE_SET_VALUE as INPUT_TEXT_SERVICE_SET_VALUE, ) +from homeassistant.components.template import DOMAIN from homeassistant.const import ATTR_ENTITY_PICTURE, CONF_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import assert_setup_component +from tests.common import MockConfigEntry, assert_setup_component from tests.typing import ClientSessionGenerator _DEFAULT = object() @@ -74,6 +76,37 @@ async def _assert_state( assert body == expected_image +@pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") +async def test_setup_config_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the config flow.""" + + respx.get("http://example.com").respond( + stream=imgbytes_jpg, content_type="image/jpeg" + ) + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "image", + "url": "http://example.com", + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("image.my_template") + assert state is not None + assert state.state == "2024-07-09T00:00:00+00:00" + + @respx.mock @pytest.mark.freeze_time("2023-04-01 00:00:00+00:00") async def test_platform_config( @@ -503,3 +536,46 @@ async def test_trigger_image_custom_entity_picture( imgbytes_jpg, 
expected_entity_picture="http://example2.com", ) + + +async def test_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for image template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + respx.get("http://example.com").respond( + stream=imgbytes_jpg, content_type="image/jpeg" + ) + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "image", + "url": "http://example.com", + "device_id": device_entry.id, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + template_entity = entity_registry.async_get("image.my_template") + assert template_entity is not None + assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_init.py b/tests/components/template/test_init.py index 58f75560878..1face4bfda0 100644 --- a/tests/components/template/test_init.py +++ b/tests/components/template/test_init.py @@ -297,6 +297,16 @@ async def async_yaml_patch_helper(hass, filename): "state": "{{1 == 2}}", }, ), + ( + { + "template_type": "image", + "name": "My template", + "url": "http://example.com", + }, + { + "url": "http://example.com", + }, + ), ( { "template_type": "button", @@ -304,6 +314,28 @@ async def async_yaml_patch_helper(hass, filename): }, {}, ), + ( + { + "template_type": "select", + "name": "My template", + "state": "{{ 'on' }}", + "options": "{{ ['off', 'on', 'auto'] }}", + }, + { + "state": "{{ 'on' }}", + "options": "{{ ['off', 'on', 'auto'] }}", + }, + ), + ( + { + "template_type": "switch", + "name": "My template", + "value_template": "{{ true }}", + }, + { + "value_template": "{{ true }}", + }, + ), ], ) async def test_change_device( diff --git a/tests/components/template/test_select.py b/tests/components/template/test_select.py index 4106abdd469..2268c0840aa 100644 --- a/tests/components/template/test_select.py +++ b/tests/components/template/test_select.py @@ -1,5 +1,7 @@ """The tests for the Template select platform.""" +from syrupy.assertion import SnapshotAssertion + from homeassistant import setup from homeassistant.components.input_select import ( ATTR_OPTION as INPUT_SELECT_ATTR_OPTION, @@ -14,17 +16,45 @@ from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION as SELECT_SERVICE_SELECT_OPTION, ) +from homeassistant.components.template import DOMAIN from homeassistant.const import ATTR_ICON, CONF_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import Context, HomeAssistant, ServiceCall -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import assert_setup_component, async_capture_events +from tests.common import MockConfigEntry, assert_setup_component, async_capture_events _TEST_SELECT = "select.template_select" # Represent for select's current_option _OPTION_INPUT_SELECT = "input_select.option" +async def test_setup_config_entry( + hass: HomeAssistant, + 
snapshot: SnapshotAssertion, +) -> None: + """Test the config flow.""" + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "select", + "state": "{{ 'on' }}", + "options": "{{ ['off', 'on', 'auto'] }}", + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("select.my_template") + assert state is not None + assert state == snapshot + + async def test_missing_optional_config(hass: HomeAssistant) -> None: """Test: missing optional template is ok.""" with assert_setup_component(1, "template"): @@ -428,3 +458,43 @@ async def test_template_icon_with_trigger(hass: HomeAssistant) -> None: state = hass.states.get(_TEST_SELECT) assert state.state == "a" assert state.attributes[ATTR_ICON] == "mdi:greater" + + +async def test_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for select template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "select", + "state": "{{ 'on' }}", + "options": "{{ ['off', 'on', 'auto'] }}", + "device_id": device_entry.id, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + template_entity = entity_registry.async_get("select.my_template") + assert template_entity is not None + assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_switch.py b/tests/components/template/test_switch.py index 68cca990ef1..2fc0f29acaf 100644 --- a/tests/components/template/test_switch.py +++ b/tests/components/template/test_switch.py @@ -1,8 +1,10 @@ """The tests for the Template switch platform.""" import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant import setup +from homeassistant.components import template from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -13,9 +15,15 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import CoreState, HomeAssistant, ServiceCall, State +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from tests.common import assert_setup_component, mock_component, mock_restore_cache +from tests.common import ( + MockConfigEntry, + assert_setup_component, + mock_component, + mock_restore_cache, +) OPTIMISTIC_SWITCH_CONFIG = { "turn_on": { @@ -35,6 +43,38 @@ OPTIMISTIC_SWITCH_CONFIG = { } +async def test_setup_config_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the config flow.""" + + hass.states.async_set( + "switch.one", + "on", + {}, + ) + + template_config_entry = MockConfigEntry( + data={}, + domain=template.DOMAIN, + options={ + "name": "My 
template", + "value_template": "{{ states('switch.one') }}", + "template_type": SWITCH_DOMAIN, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("switch.my_template") + assert state is not None + assert state == snapshot + + async def test_template_state_text(hass: HomeAssistant) -> None: """Test the state text of a template.""" with assert_setup_component(1, "switch"): @@ -655,3 +695,42 @@ async def test_unique_id(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(hass.states.async_all("switch")) == 1 + + +async def test_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for Template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + template_config_entry = MockConfigEntry( + data={}, + domain=template.DOMAIN, + options={ + "name": "My template", + "value_template": "{{ true }}", + "template_type": "switch", + "device_id": device_entry.id, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + template_entity = entity_registry.async_get("switch.my_template") + assert template_entity is not None + assert template_entity.device_id == device_entry.id diff --git a/tests/components/tessie/fixtures/online.json b/tests/components/tessie/fixtures/online.json index ed49b4bfd75..38b904cdffb 100644 --- a/tests/components/tessie/fixtures/online.json +++ b/tests/components/tessie/fixtures/online.json @@ -98,6 +98,8 @@ "passenger_temp_setting": 22.5, "remote_heater_control_enabled": false, "right_temp_direction": 234, + "seat_fan_front_left": 0, + "seat_fan_front_right": 0, "seat_heater_left": 0, "seat_heater_rear_center": 0, "seat_heater_rear_left": 0, @@ -157,7 +159,7 @@ "exterior_trim_override": "", "has_air_suspension": false, "has_ludicrous_mode": false, - "has_seat_cooling": false, + "has_seat_cooling": true, "headlamp_type": "Global", "interior_trim_type": "White2", "key_version": 2, @@ -173,7 +175,7 @@ "roof_color": "RoofColorGlass", "seat_type": null, "spoiler_type": "None", - "sun_roof_installed": null, + "sun_roof_installed": true, "supports_qr_pairing": false, "third_row_seats": "None", "timestamp": 1701139037461, diff --git a/tests/components/tessie/fixtures/vehicles.json b/tests/components/tessie/fixtures/vehicles.json index 359e23f9cdd..622b31bae69 100644 --- a/tests/components/tessie/fixtures/vehicles.json +++ b/tests/components/tessie/fixtures/vehicles.json @@ -111,6 +111,8 @@ "passenger_temp_setting": 22.5, "remote_heater_control_enabled": false, "right_temp_direction": 234, + "seat_fan_front_left": 0, + "seat_fan_front_right": 0, "seat_heater_left": 0, "seat_heater_rear_center": 0, "seat_heater_rear_left": 0, @@ -174,7 +176,7 @@ "exterior_trim_override": "", "has_air_suspension": false, "has_ludicrous_mode": false, - "has_seat_cooling": false, + "has_seat_cooling": true, "headlamp_type": "Global", "interior_trim_type": 
"White2", "key_version": 2, @@ -190,7 +192,7 @@ "roof_color": "RoofColorGlass", "seat_type": null, "spoiler_type": "None", - "sun_roof_installed": null, + "sun_roof_installed": true, "supports_qr_pairing": false, "third_row_seats": "None", "timestamp": 1701139037461, diff --git a/tests/components/tessie/snapshots/test_cover.ambr b/tests/components/tessie/snapshots/test_cover.ambr index ff04c528244..8c8c9a48c11 100644 --- a/tests/components/tessie/snapshots/test_cover.ambr +++ b/tests/components/tessie/snapshots/test_cover.ambr @@ -95,6 +95,87 @@ 'state': 'closed', }) # --- +# name: test_covers[cover.test_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'vehicle_state_sun_roof_state', + 'unique_id': 'VINVINVIN-vehicle_state_sun_roof_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_sunroof-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_sunroof', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sunroof', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'vehicle_state_sun_roof_state', + 'unique_id': 'VINVINVIN-vehicle_state_sun_roof_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_sunroof-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Sunroof', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_sunroof', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- # name: test_covers[cover.test_trunk-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_diagnostics.ambr b/tests/components/tessie/snapshots/test_diagnostics.ambr index a399f56c94d..8eef7cbd549 100644 --- a/tests/components/tessie/snapshots/test_diagnostics.ambr +++ b/tests/components/tessie/snapshots/test_diagnostics.ambr @@ -247,6 +247,8 @@ 'climate_state_passenger_temp_setting': 22.5, 'climate_state_remote_heater_control_enabled': False, 'climate_state_right_temp_direction': 234, + 'climate_state_seat_fan_front_left': 0, + 'climate_state_seat_fan_front_right': 0, 'climate_state_seat_heater_left': 0, 'climate_state_seat_heater_rear_center': 0, 'climate_state_seat_heater_rear_left': 0, @@ -313,7 +315,7 @@ 'vehicle_config_exterior_trim_override': '', 'vehicle_config_has_air_suspension': False, 'vehicle_config_has_ludicrous_mode': False, - 'vehicle_config_has_seat_cooling': False, + 'vehicle_config_has_seat_cooling': True, 'vehicle_config_headlamp_type': 'Global', 'vehicle_config_interior_trim_type': 'White2', 
'vehicle_config_key_version': 2, @@ -329,7 +331,7 @@ 'vehicle_config_roof_color': 'RoofColorGlass', 'vehicle_config_seat_type': None, 'vehicle_config_spoiler_type': 'None', - 'vehicle_config_sun_roof_installed': None, + 'vehicle_config_sun_roof_installed': True, 'vehicle_config_supports_qr_pairing': False, 'vehicle_config_third_row_seats': 'None', 'vehicle_config_timestamp': 1701139037461, diff --git a/tests/components/tessie/snapshots/test_select.ambr b/tests/components/tessie/snapshots/test_select.ambr index edd061a14e6..acc1946aab5 100644 --- a/tests/components/tessie/snapshots/test_select.ambr +++ b/tests/components/tessie/snapshots/test_select.ambr @@ -113,6 +113,124 @@ 'state': 'self_consumption', }) # --- +# name: test_select[select.test_seat_cooler_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.test_seat_cooler_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Seat cooler left', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_seat_fan_front_left', + 'unique_id': 'VINVINVIN-climate_state_seat_fan_front_left', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.test_seat_cooler_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Seat cooler left', + 'options': list([ + , + , + , + , + ]), + }), + 'context': , + 'entity_id': 'select.test_seat_cooler_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_select[select.test_seat_cooler_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.test_seat_cooler_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Seat cooler right', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_seat_fan_front_right', + 'unique_id': 'VINVINVIN-climate_state_seat_fan_front_right', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.test_seat_cooler_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Seat cooler right', + 'options': list([ + , + , + , + , + ]), + }), + 'context': , + 'entity_id': 'select.test_seat_cooler_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_select[select.test_seat_heater_left-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/test_cover.py b/tests/components/tessie/test_cover.py index b731add10f8..be4dda3ec7b 100644 --- a/tests/components/tessie/test_cover.py +++ b/tests/components/tessie/test_cover.py @@ -42,6 +42,7 @@ async def test_covers( 
("cover.test_charge_port_door", "open_unlock_charge_port", "close_charge_port"), ("cover.test_frunk", "open_front_trunk", False), ("cover.test_trunk", "open_close_rear_trunk", "open_close_rear_trunk"), + ("cover.test_sunroof", "vent_sunroof", "close_sunroof"), ): # Test open windows if openfunc: diff --git a/tests/components/tessie/test_select.py b/tests/components/tessie/test_select.py index 51645c75d47..c78923fbf5b 100644 --- a/tests/components/tessie/test_select.py +++ b/tests/components/tessie/test_select.py @@ -11,7 +11,10 @@ from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) -from homeassistant.components.tessie.const import TessieSeatHeaterOptions +from homeassistant.components.tessie.const import ( + TessieSeatCoolerOptions, + TessieSeatHeaterOptions, +) from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -81,6 +84,22 @@ async def test_select( assert state.state == EnergyExportMode.BATTERY_OK.value call.assert_called_once() + # Test changing select + entity_id = "select.test_seat_cooler_left" + with patch( + "homeassistant.components.tessie.select.set_seat_cool", + return_value=TEST_RESPONSE, + ) as mock_set: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: [entity_id], ATTR_OPTION: TessieSeatCoolerOptions.LOW}, + blocking=True, + ) + mock_set.assert_called_once() + assert mock_set.call_args[1]["seat"] == "front_left" + assert mock_set.call_args[1]["level"] == 1 + async def test_errors(hass: HomeAssistant) -> None: """Tests unknown error is handled.""" diff --git a/tests/components/todo/test_init.py b/tests/components/todo/test_init.py index cbb61434f1a..b62505b14b4 100644 --- a/tests/components/todo/test_init.py +++ b/tests/components/todo/test_init.py @@ -12,15 +12,22 @@ import voluptuous as vol from homeassistant.components import conversation from homeassistant.components.homeassistant.exposed_entities import async_expose_entity from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_DUE_DATETIME, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, DOMAIN, TodoItem, TodoItemStatus, TodoListEntity, TodoListEntityFeature, + TodoServices, intent as todo_intent, ) from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow -from homeassistant.const import Platform +from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import intent @@ -230,11 +237,11 @@ async def test_list_todo_items( [ ({}, [ITEM_1, ITEM_2]), ( - {"status": [TodoItemStatus.COMPLETED, TodoItemStatus.NEEDS_ACTION]}, + {ATTR_STATUS: [TodoItemStatus.COMPLETED, TodoItemStatus.NEEDS_ACTION]}, [ITEM_1, ITEM_2], ), - ({"status": [TodoItemStatus.NEEDS_ACTION]}, [ITEM_1]), - ({"status": [TodoItemStatus.COMPLETED]}, [ITEM_2]), + ({ATTR_STATUS: [TodoItemStatus.NEEDS_ACTION]}, [ITEM_1]), + ({ATTR_STATUS: [TodoItemStatus.COMPLETED]}, [ITEM_2]), ], ) async def test_get_items_service( @@ -251,13 +258,13 @@ async def test_get_items_service( state = hass.states.get("todo.entity1") assert state assert state.state == "1" - assert state.attributes == {"supported_features": 15} + assert state.attributes == {ATTR_SUPPORTED_FEATURES: 15} result = await hass.services.async_call( DOMAIN, - "get_items", + 
TodoServices.GET_ITEMS, service_data, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, return_response=True, ) @@ -297,9 +304,9 @@ async def test_add_item_service( await hass.services.async_call( DOMAIN, - "add_item", - {"item": "New item"}, - target={"entity_id": "todo.entity1"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "New item"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -324,9 +331,9 @@ async def test_add_item_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - "add_item", - {"item": "New item"}, - target={"entity_id": "todo.entity1"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "New item"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -335,21 +342,21 @@ async def test_add_item_service_raises( ("item_data", "expected_exception", "expected_error"), [ ({}, vol.Invalid, "required key not provided"), - ({"item": ""}, vol.Invalid, "length of value must be at least 1"), + ({ATTR_ITEM: ""}, vol.Invalid, "length of value must be at least 1"), ( - {"item": "Submit forms", "description": "Submit tax forms"}, + {ATTR_ITEM: "Submit forms", ATTR_DESCRIPTION: "Submit tax forms"}, ServiceValidationError, "does not support setting field: description", ), ( - {"item": "Submit forms", "due_date": "2023-11-17"}, + {ATTR_ITEM: "Submit forms", ATTR_DUE_DATE: "2023-11-17"}, ServiceValidationError, "does not support setting field: due_date", ), ( { - "item": "Submit forms", - "due_datetime": f"2023-11-17T17:00:00{TEST_OFFSET}", + ATTR_ITEM: "Submit forms", + ATTR_DUE_DATETIME: f"2023-11-17T17:00:00{TEST_OFFSET}", }, ServiceValidationError, "does not support setting field: due_datetime", @@ -370,9 +377,9 @@ async def test_add_item_service_invalid_input( with pytest.raises(expected_exception) as exc: await hass.services.async_call( DOMAIN, - "add_item", + TodoServices.ADD_ITEM, item_data, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -384,7 +391,7 @@ async def test_add_item_service_invalid_input( [ ( TodoListEntityFeature.SET_DUE_DATE_ON_ITEM, - {"item": "New item", "due_date": "2023-11-13"}, + {ATTR_ITEM: "New item", ATTR_DUE_DATE: "2023-11-13"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -393,7 +400,10 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, - {"item": "New item", "due_datetime": f"2023-11-13T17:00:00{TEST_OFFSET}"}, + { + ATTR_ITEM: "New item", + ATTR_DUE_DATETIME: f"2023-11-13T17:00:00{TEST_OFFSET}", + }, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -402,7 +412,7 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, - {"item": "New item", "due_datetime": "2023-11-13T17:00:00+00:00"}, + {ATTR_ITEM: "New item", ATTR_DUE_DATETIME: "2023-11-13T17:00:00+00:00"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -411,7 +421,7 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, - {"item": "New item", "due_datetime": "2023-11-13"}, + {ATTR_ITEM: "New item", ATTR_DUE_DATETIME: "2023-11-13"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -420,7 +430,7 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DESCRIPTION_ON_ITEM, - {"item": "New item", "description": "Submit revised draft"}, + {ATTR_ITEM: "New item", ATTR_DESCRIPTION: "Submit 
revised draft"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -443,9 +453,9 @@ async def test_add_item_service_extended_fields( await hass.services.async_call( DOMAIN, - "add_item", - {"item": "New item", **item_data}, - target={"entity_id": "todo.entity1"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "New item", **item_data}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -465,9 +475,9 @@ async def test_update_todo_item_service_by_id( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "rename": "Updated item", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", ATTR_RENAME: "Updated item", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -490,9 +500,9 @@ async def test_update_todo_item_service_by_id_status_only( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -515,9 +525,9 @@ async def test_update_todo_item_service_by_id_rename( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "rename": "Updated item"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", "rename": "Updated item"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -540,9 +550,9 @@ async def test_update_todo_item_service_raises( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "rename": "Updated item", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", "rename": "Updated item", "status": "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -550,9 +560,9 @@ async def test_update_todo_item_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "rename": "Updated item", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", "rename": "Updated item", "status": "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -567,9 +577,9 @@ async def test_update_todo_item_service_by_summary( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "Item #1", "rename": "Something else", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "Item #1", "rename": "Something else", "status": "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -592,9 +602,9 @@ async def test_update_todo_item_service_by_summary_only_status( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "Item #1", "rename": "Something else"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "Item #1", "rename": "Something else"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -618,9 +628,9 @@ async def test_update_todo_item_service_by_summary_not_found( with pytest.raises(ServiceValidationError, match="Unable to find"): await hass.services.async_call( DOMAIN, - "update_item", - {"item": "Item #7", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "Item #7", "status": "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -652,7 
+662,7 @@ async def test_update_item_service_invalid_input( DOMAIN, "update_item", item_data, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -677,9 +687,9 @@ async def test_update_todo_item_field_unsupported( with pytest.raises(ServiceValidationError, match="does not support"): await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", **update_data}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", **update_data}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -733,9 +743,9 @@ async def test_update_todo_item_extended_fields( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", **update_data}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", **update_data}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -823,9 +833,9 @@ async def test_update_todo_item_extended_fields_overwrite_existing_values( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", **update_data}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", **update_data}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -845,9 +855,9 @@ async def test_remove_todo_item_service_by_id( await hass.services.async_call( DOMAIN, - "remove_item", - {"item": ["1", "2"]}, - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["1", "2"]}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -868,9 +878,9 @@ async def test_remove_todo_item_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - "remove_item", - {"item": ["1", "2"]}, - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["1", "2"]}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -888,9 +898,9 @@ async def test_remove_todo_item_service_invalid_input( ): await hass.services.async_call( DOMAIN, - "remove_item", + TodoServices.REMOVE_ITEM, {}, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -905,9 +915,9 @@ async def test_remove_todo_item_service_by_summary( await hass.services.async_call( DOMAIN, - "remove_item", - {"item": ["Item #1"]}, - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["Item #1"]}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -927,9 +937,9 @@ async def test_remove_todo_item_service_by_summary_not_found( with pytest.raises(ServiceValidationError, match="Unable to find"): await hass.services.async_call( DOMAIN, - "remove_item", - {"item": ["Item #7"]}, - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["Item #7"]}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -1035,26 +1045,26 @@ async def test_move_todo_item_service_invalid_input( ("service_name", "payload"), [ ( - "add_item", + TodoServices.ADD_ITEM, { - "item": "New item", + ATTR_ITEM: "New item", }, ), ( - "remove_item", + TodoServices.REMOVE_ITEM, { - "item": ["1"], + ATTR_ITEM: ["1"], }, ), ( - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "1", - "rename": "Updated item", + ATTR_ITEM: "1", + ATTR_RENAME: "Updated item", }, ), ( - "remove_completed_items", + TodoServices.REMOVE_COMPLETED_ITEMS, None, ), ], @@ -1078,7 +1088,7 @@ async def test_unsupported_service( DOMAIN, service_name, payload, - target={"entity_id": "todo.entity1"}, + 
target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -1131,7 +1141,7 @@ async def test_add_item_intent( hass, "test", todo_intent.INTENT_LIST_ADD_ITEM, - {"item": {"value": "beer"}, "name": {"value": "list 1"}}, + {ATTR_ITEM: {"value": "beer"}, "name": {"value": "list 1"}}, assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.ACTION_DONE @@ -1147,7 +1157,7 @@ async def test_add_item_intent( hass, "test", todo_intent.INTENT_LIST_ADD_ITEM, - {"item": {"value": "cheese"}, "name": {"value": "List 2"}}, + {ATTR_ITEM: {"value": "cheese"}, "name": {"value": "List 2"}}, assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.ACTION_DONE @@ -1162,7 +1172,7 @@ async def test_add_item_intent( hass, "test", todo_intent.INTENT_LIST_ADD_ITEM, - {"item": {"value": "wine"}, "name": {"value": "lIST 2"}}, + {ATTR_ITEM: {"value": "wine"}, "name": {"value": "lIST 2"}}, assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.ACTION_DONE @@ -1224,8 +1234,8 @@ async def test_remove_completed_items_service( await hass.services.async_call( DOMAIN, - "remove_completed_items", - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_COMPLETED_ITEMS, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -1238,8 +1248,8 @@ async def test_remove_completed_items_service( # calling service multiple times will not call the entity method await hass.services.async_call( DOMAIN, - "remove_completed_items", - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_COMPLETED_ITEMS, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) test_entity.async_delete_todo_items.assert_not_called() @@ -1257,8 +1267,8 @@ async def test_remove_completed_items_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - "remove_completed_items", - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_COMPLETED_ITEMS, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -1423,7 +1433,7 @@ async def test_list_todo_items_extended_fields( DOMAIN, "get_items", {}, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, return_response=True, ) diff --git a/tests/components/todoist/test_todo.py b/tests/components/todoist/test_todo.py index 2aabfcc5755..1c2da67fb02 100644 --- a/tests/components/todoist/test_todo.py +++ b/tests/components/todoist/test_todo.py @@ -6,8 +6,17 @@ from unittest.mock import AsyncMock import pytest from todoist_api_python.models import Due, Task -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN -from homeassistant.const import Platform +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_DUE_DATETIME, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity @@ -86,7 +95,7 @@ async def test_todo_item_state( ), ( [], - {"due_date": "2023-11-18"}, + {ATTR_DUE_DATE: "2023-11-18"}, [ make_api_task( id="task-id-1", @@ -105,7 +114,7 @@ async def test_todo_item_state( ), ( [], - {"due_datetime": "2023-11-18T06:30:00"}, + {ATTR_DUE_DATETIME: "2023-11-18T06:30:00"}, [ make_api_task( id="task-id-1", @@ -132,7 +141,7 @@ async def test_todo_item_state( ), ( [], - {"description": "6-pack"}, + {ATTR_DESCRIPTION: "6-pack"}, [ 
make_api_task( id="task-id-1", @@ -173,9 +182,9 @@ async def test_add_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Soda", **item_data}, - target={"entity_id": "todo.name"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda", **item_data}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) @@ -190,9 +199,9 @@ async def test_add_todo_list_item( result = await hass.services.async_call( TODO_DOMAIN, - "get_items", + TodoServices.GET_ITEMS, {}, - target={"entity_id": "todo.name"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, return_response=True, ) @@ -223,9 +232,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "task-id-1", "status": "completed"}, - target={"entity_id": "todo.name"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "task-id-1", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) assert api.close_task.called @@ -246,9 +255,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "task-id-1", "status": "needs_action"}, - target={"entity_id": "todo.name"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "task-id-1", ATTR_STATUS: "needs_action"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) assert api.reopen_task.called @@ -274,7 +283,7 @@ async def test_update_todo_item_status( description="desc", ) ], - {"rename": "Milk"}, + {ATTR_RENAME: "Milk"}, [ make_api_task( id="task-id-1", @@ -298,7 +307,7 @@ async def test_update_todo_item_status( ), ( [make_api_task(id="task-id-1", content="Soda", is_completed=False)], - {"due_date": "2023-11-18"}, + {ATTR_DUE_DATE: "2023-11-18"}, [ make_api_task( id="task-id-1", @@ -322,7 +331,7 @@ async def test_update_todo_item_status( ), ( [make_api_task(id="task-id-1", content="Soda", is_completed=False)], - {"due_datetime": "2023-11-18T06:30:00"}, + {ATTR_DUE_DATETIME: "2023-11-18T06:30:00"}, [ make_api_task( id="task-id-1", @@ -351,7 +360,7 @@ async def test_update_todo_item_status( ), ( [make_api_task(id="task-id-1", content="Soda", is_completed=False)], - {"description": "6-pack"}, + {ATTR_DESCRIPTION: "6-pack"}, [ make_api_task( id="task-id-1", @@ -382,7 +391,7 @@ async def test_update_todo_item_status( is_completed=False, ) ], - {"description": None}, + {ATTR_DESCRIPTION: None}, [ make_api_task( id="task-id-1", @@ -415,7 +424,7 @@ async def test_update_todo_item_status( due=Due(date="2024-01-01", is_recurring=True, string="every day"), ) ], - {"due_date": "2024-02-01"}, + {ATTR_DUE_DATE: "2024-02-01"}, [ make_api_task( id="task-id-1", @@ -472,9 +481,9 @@ async def test_update_todo_items( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "task-id-1", **update_data}, - target={"entity_id": "todo.name"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "task-id-1", **update_data}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) assert api.update_task.called @@ -484,9 +493,9 @@ async def test_update_todo_items( result = await hass.services.async_call( TODO_DOMAIN, - "get_items", + TodoServices.GET_ITEMS, {}, - target={"entity_id": "todo.name"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, return_response=True, ) @@ -519,9 +528,9 @@ async def test_remove_todo_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["task-id-1", "task-id-2"]}, - target={"entity_id": "todo.name"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["task-id-1", "task-id-2"]}, + target={ATTR_ENTITY_ID: 
"todo.name"}, blocking=True, ) assert api.delete_task.call_count == 2 @@ -575,9 +584,9 @@ async def test_subscribe( ] await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "Cheese", "rename": "Wine"}, - target={"entity_id": "todo.name"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "Cheese", ATTR_RENAME: "Wine"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) diff --git a/tests/components/tplink/test_light.py b/tests/components/tplink/test_light.py index bb814d1f5d3..590274b8405 100644 --- a/tests/components/tplink/test_light.py +++ b/tests/components/tplink/test_light.py @@ -533,16 +533,16 @@ async def test_smart_strip_effects(hass: HomeAssistant) -> None: assert state.attributes[ATTR_EFFECT_LIST] == ["Off", "Effect1", "Effect2"] # Ensure setting color temp when an effect - # is in progress calls set_hsv to clear the effect + # is in progress calls set_effect to clear the effect await hass.services.async_call( LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 4000}, blocking=True, ) - light.set_hsv.assert_called_once_with(0, 0, None) + light_effect.set_effect.assert_called_once_with(LightEffect.LIGHT_EFFECTS_OFF) light.set_color_temp.assert_called_once_with(4000, brightness=None, transition=None) - light.set_hsv.reset_mock() + light_effect.set_effect.reset_mock() light.set_color_temp.reset_mock() await hass.services.async_call( diff --git a/tests/components/trace/test_websocket_api.py b/tests/components/trace/test_websocket_api.py index 92ba2c67020..c7e445833ae 100644 --- a/tests/components/trace/test_websocket_api.py +++ b/tests/components/trace/test_websocket_api.py @@ -207,7 +207,7 @@ async def test_get_trace( _assert_raw_config(domain, sun_config, trace) assert trace["blueprint_inputs"] is None assert trace["context"] - assert trace["error"] == "Service test.automation not found" + assert trace["error"] == "Action test.automation not found" assert trace["state"] == "stopped" assert trace["script_execution"] == "error" assert trace["item_id"] == "sun" @@ -899,7 +899,7 @@ async def test_list_traces( assert len(_find_traces(response["result"], domain, "sun")) == 1 trace = _find_traces(response["result"], domain, "sun")[0] assert trace["last_step"] == last_step[0].format(prefix=prefix) - assert trace["error"] == "Service test.automation not found" + assert trace["error"] == "Action test.automation not found" assert trace["state"] == "stopped" assert trace["script_execution"] == script_execution[0] assert trace["timestamp"] @@ -1639,7 +1639,7 @@ async def test_trace_blueprint_automation( assert trace["config"]["id"] == "sun" assert trace["blueprint_inputs"] == sun_config assert trace["context"] - assert trace["error"] == "Service test.automation not found" + assert trace["error"] == "Action test.automation not found" assert trace["state"] == "stopped" assert trace["script_execution"] == "error" assert trace["item_id"] == "sun" diff --git a/tests/components/unifi/test_config_flow.py b/tests/components/unifi/test_config_flow.py index 7b37437cd1d..9ae3af19b46 100644 --- a/tests/components/unifi/test_config_flow.py +++ b/tests/components/unifi/test_config_flow.py @@ -1,5 +1,6 @@ """Test UniFi Network config flow.""" +from collections.abc import Callable import socket from unittest.mock import PropertyMock, patch @@ -338,6 +339,44 @@ async def test_reauth_flow_update_configuration( assert config_entry.data[CONF_PASSWORD] == "new_pass" +async def test_reauth_flow_update_configuration_on_not_loaded_entry( + hass: HomeAssistant, 
config_entry_factory: Callable[[], ConfigEntry] +) -> None: + """Verify reauth flow can update hub configuration on a not loaded entry.""" + with patch("aiounifi.Controller.login", side_effect=aiounifi.errors.RequestError): + config_entry = await config_entry_factory() + + result = await hass.config_entries.flow.async_init( + UNIFI_DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": config_entry.unique_id, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "1.2.3.4", + CONF_USERNAME: "new_name", + CONF_PASSWORD: "new_pass", + CONF_PORT: 1234, + CONF_VERIFY_SSL: True, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert config_entry.data[CONF_HOST] == "1.2.3.4" + assert config_entry.data[CONF_USERNAME] == "new_name" + assert config_entry.data[CONF_PASSWORD] == "new_pass" + + @pytest.mark.parametrize("client_payload", [CLIENTS]) @pytest.mark.parametrize("device_payload", [DEVICES]) @pytest.mark.parametrize("wlan_payload", [WLANS]) diff --git a/tests/components/upb/test_config_flow.py b/tests/components/upb/test_config_flow.py index d5d6d70bb68..54aeb00e89a 100644 --- a/tests/components/upb/test_config_flow.py +++ b/tests/components/upb/test_config_flow.py @@ -1,7 +1,7 @@ """Test the UPB Control config flow.""" from asyncio import TimeoutError -from unittest.mock import MagicMock, PropertyMock, patch +from unittest.mock import AsyncMock, PropertyMock, patch from homeassistant import config_entries from homeassistant.components.upb.const import DOMAIN @@ -15,11 +15,11 @@ def mocked_upb(sync_complete=True, config_ok=True): def _upb_lib_connect(callback): callback() - upb_mock = MagicMock() + upb_mock = AsyncMock() type(upb_mock).network_id = PropertyMock(return_value="42") type(upb_mock).config_ok = PropertyMock(return_value=config_ok) if sync_complete: - upb_mock.connect.side_effect = _upb_lib_connect + upb_mock.async_connect.side_effect = _upb_lib_connect return patch( "homeassistant.components.upb.config_flow.upb_lib.UpbPim", return_value=upb_mock ) diff --git a/tests/components/zha/test_select.py b/tests/components/zha/test_select.py index a39172b850e..f0f742503e3 100644 --- a/tests/components/zha/test_select.py +++ b/tests/components/zha/test_select.py @@ -13,12 +13,19 @@ from homeassistant.components.zha.helpers import ( get_zha_gateway, get_zha_gateway_proxy, ) -from homeassistant.const import STATE_UNKNOWN, EntityCategory, Platform -from homeassistant.core import HomeAssistant +from homeassistant.const import ( + STATE_UNAVAILABLE, + STATE_UNKNOWN, + EntityCategory, + Platform, +) +from homeassistant.core import HomeAssistant, State from homeassistant.helpers import entity_registry as er from .common import find_entity_id +from tests.common import mock_restore_cache + @pytest.fixture(autouse=True) def select_select_only(): @@ -103,3 +110,51 @@ async def test_select( state = hass.states.get(entity_id) assert state assert state.state == security.IasWd.Warning.WarningMode.Burglar.name + + +@pytest.mark.parametrize( + ("restored_state", "expected_state"), + [ + # Unavailable is not restored + (STATE_UNAVAILABLE, STATE_UNKNOWN), + # Normal state is + ( + security.IasWd.Warning.WarningMode.Burglar.name, + security.IasWd.Warning.WarningMode.Burglar.name, + ), + ], +) +async def test_select_restore_state( + 
hass: HomeAssistant, + entity_registry: er.EntityRegistry, + setup_zha, + zigpy_device_mock, + restored_state: str, + expected_state: str, +) -> None: + """Test ZHA select platform restore state.""" + entity_id = "select.fakemanufacturer_fakemodel_default_siren_tone" + + mock_restore_cache(hass, [State(entity_id, restored_state)]) + + await setup_zha() + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id, security.IasWd.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.IAS_WARNING_DEVICE, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + } + ) + + gateway = get_zha_gateway(hass) + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get(entity_id) + assert state + assert state.state == expected_state diff --git a/tests/components/zha/test_websocket_api.py b/tests/components/zha/test_websocket_api.py index ea8ea39aed9..f6afee9eb83 100644 --- a/tests/components/zha/test_websocket_api.py +++ b/tests/components/zha/test_websocket_api.py @@ -440,9 +440,16 @@ async def test_list_groupable_devices( assert len(device_endpoints) == 0 -async def test_add_group(zha_client) -> None: +async def test_add_group(hass: HomeAssistant, zha_client) -> None: """Test adding and getting a new ZHA zigbee group.""" - await zha_client.send_json({ID: 12, TYPE: "zha/group/add", GROUP_NAME: "new_group"}) + await zha_client.send_json( + { + ID: 12, + TYPE: "zha/group/add", + GROUP_NAME: "new_group", + "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], + } + ) msg = await zha_client.receive_json() assert msg["id"] == 12 @@ -450,8 +457,17 @@ async def test_add_group(zha_client) -> None: added_group = msg["result"] + groupable_device = get_zha_gateway_proxy(hass).device_proxies[ + EUI64.convert(IEEE_GROUPABLE_DEVICE) + ] + assert added_group["name"] == "new_group" - assert added_group["members"] == [] + assert len(added_group["members"]) == 1 + assert added_group["members"][0]["device"]["ieee"] == IEEE_GROUPABLE_DEVICE + assert ( + added_group["members"][0]["device"]["device_reg_id"] + == groupable_device.device_id + ) await zha_client.send_json({ID: 13, TYPE: "zha/groups"}) @@ -499,6 +515,82 @@ async def test_remove_group(zha_client) -> None: assert len(groups) == 0 +async def test_add_group_member(hass: HomeAssistant, zha_client) -> None: + """Test adding a ZHA zigbee group member.""" + await zha_client.send_json( + { + ID: 12, + TYPE: "zha/group/add", + GROUP_NAME: "new_group", + } + ) + + msg = await zha_client.receive_json() + assert msg["id"] == 12 + assert msg["type"] == TYPE_RESULT + + added_group = msg["result"] + + assert len(added_group["members"]) == 0 + + await zha_client.send_json( + { + ID: 13, + TYPE: "zha/group/members/add", + GROUP_ID: added_group["group_id"], + "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], + } + ) + + msg = await zha_client.receive_json() + assert msg["id"] == 13 + assert msg["type"] == TYPE_RESULT + + added_group = msg["result"] + + assert len(added_group["members"]) == 1 + assert added_group["name"] == "new_group" + assert added_group["members"][0]["device"]["ieee"] == IEEE_GROUPABLE_DEVICE + + +async def test_remove_group_member(hass: HomeAssistant, zha_client) -> None: + """Test removing a ZHA zigbee group member.""" + await zha_client.send_json( + { + ID: 12, + TYPE: "zha/group/add", + GROUP_NAME: "new_group", + "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], + } + 
) + + msg = await zha_client.receive_json() + assert msg["id"] == 12 + assert msg["type"] == TYPE_RESULT + + added_group = msg["result"] + + assert added_group["name"] == "new_group" + assert len(added_group["members"]) == 1 + assert added_group["members"][0]["device"]["ieee"] == IEEE_GROUPABLE_DEVICE + + await zha_client.send_json( + { + ID: 13, + TYPE: "zha/group/members/remove", + GROUP_ID: added_group["group_id"], + "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], + } + ) + + msg = await zha_client.receive_json() + assert msg["id"] == 13 + assert msg["type"] == TYPE_RESULT + + added_group = msg["result"] + assert len(added_group["members"]) == 0 + + @pytest.fixture async def app_controller( hass: HomeAssistant, setup_zha, zigpy_app_controller: ControllerApplication diff --git a/tests/conftest.py index b96bd783331..85f4671f6c0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1300,6 +1300,16 @@ def enable_migrate_entity_ids() -> bool: return False +@pytest.fixture +def enable_migrate_event_ids() -> bool: + """Fixture to control enabling of recorder's event id migration. + + To enable event id migration, tests can be marked with: + @pytest.mark.parametrize("enable_migrate_event_ids", [True]) + """ + return False + + @pytest.fixture def recorder_config() -> dict[str, Any] | None: """Fixture to override recorder config. @@ -1416,6 +1426,7 @@ async def async_test_recorder( enable_migrate_context_ids: bool, enable_migrate_event_type_ids: bool, enable_migrate_entity_ids: bool, + enable_migrate_event_ids: bool, ) -> AsyncGenerator[RecorderInstanceGenerator]: """Yield context manager to setup recorder instance.""" # pylint: disable-next=import-outside-toplevel @@ -1457,6 +1468,11 @@ async def async_test_recorder( migrate_entity_ids = ( recorder.Recorder._migrate_entity_ids if enable_migrate_entity_ids else None ) + legacy_event_id_foreign_key_exists = ( + recorder.Recorder._legacy_event_id_foreign_key_exists + if enable_migrate_event_ids + else None + ) with ( patch( "homeassistant.components.recorder.Recorder.async_nightly_tasks", @@ -1493,6 +1509,11 @@ async def async_test_recorder( side_effect=migrate_entity_ids, autospec=True, ), + patch( + "homeassistant.components.recorder.Recorder._legacy_event_id_foreign_key_exists", + side_effect=legacy_event_id_foreign_key_exists, + autospec=True, + ), patch( "homeassistant.components.recorder.Recorder._schedule_compile_missing_statistics", side_effect=compile_missing, diff --git a/tests/helpers/test_script.py index 08c196a04d3..52d9ff11059 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -3906,10 +3906,10 @@ async def test_parallel_error( assert len(events) == 0 expected_trace = { - "0": [{"error": "Service epic.failure not found"}], + "0": [{"error": "Action epic.failure not found"}], "0/parallel/0/sequence/0": [ { - "error": "Service epic.failure not found", + "error": "Action epic.failure not found", "result": { "params": { "domain": "epic", @@ -3958,7 +3958,7 @@ async def test_propagate_error_service_not_found(hass: HomeAssistant) -> None: expected_trace = { "0": [ { - "error": "Service test.script not found", + "error": "Action test.script not found", "result": { "params": { "domain": "test", @@ -5799,7 +5799,7 @@ async def test_continue_on_error_automation_issue(hass: HomeAssistant) -> None: { "0": [ { - "error": "Service service.not_found not found", + "error": "Action service.not_found not found", "result": { "params": { "domain":
"service", diff --git a/tests/test_core.py b/tests/test_core.py index 29e3bf89137..14bde940c12 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1715,7 +1715,7 @@ async def test_serviceregistry_service_that_not_exists(hass: HomeAssistant) -> N assert exc.value.domain == "test_do_not" assert exc.value.service == "exist" - assert str(exc.value) == "Service test_do_not.exist not found" + assert str(exc.value) == "Action test_do_not.exist not found" async def test_serviceregistry_async_service_raise_exception( @@ -1806,7 +1806,7 @@ async def test_services_call_return_response_requires_blocking( return_response=True, ) assert str(exc.value) == ( - "A non blocking service call with argument blocking=False " + "A non blocking action call with argument blocking=False " "can't be used together with argument return_response=True" ) @@ -1852,7 +1852,7 @@ async def test_serviceregistry_return_response_invalid( ("supports_response", "return_response", "expected_error"), [ (SupportsResponse.NONE, True, "does not return responses"), - (SupportsResponse.ONLY, False, "call requires responses"), + (SupportsResponse.ONLY, False, "action requires responses"), ], ) async def test_serviceregistry_return_response_arguments(