diff --git a/homeassistant/components/abode/manifest.json b/homeassistant/components/abode/manifest.json index de1000319f1..225edea40ca 100644 --- a/homeassistant/components/abode/manifest.json +++ b/homeassistant/components/abode/manifest.json @@ -9,5 +9,5 @@ }, "iot_class": "cloud_push", "loggers": ["jaraco.abode", "lomond"], - "requirements": ["jaraco.abode==5.1.2"] + "requirements": ["jaraco.abode==5.2.1"] } diff --git a/homeassistant/components/anova/manifest.json b/homeassistant/components/anova/manifest.json index 7e605edc217..7e032f0e361 100644 --- a/homeassistant/components/anova/manifest.json +++ b/homeassistant/components/anova/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/anova", "iot_class": "cloud_push", "loggers": ["anova_wifi"], - "requirements": ["anova-wifi==0.15.0"] + "requirements": ["anova-wifi==0.17.0"] } diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index f898ce64ce6..83d0e985b8a 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==6.4.1", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==6.4.2", "yalexs-ble==2.4.3"] } diff --git a/homeassistant/components/climate/__init__.py b/homeassistant/components/climate/__init__.py index bc81ce6e241..2c891779c37 100644 --- a/homeassistant/components/climate/__init__.py +++ b/homeassistant/components/climate/__init__.py @@ -377,6 +377,14 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): # Return if integration has migrated already return + supported_features = self.supported_features + if supported_features & ( + ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF + ): + # The entity supports both turn_on and turn_off, the backwards compatibility + # checks are not needed + return + supported_features = self.supported_features if not supported_features & ClimateEntityFeature.TURN_OFF and ( type(self).async_turn_off is not ClimateEntity.async_turn_off diff --git a/homeassistant/components/enphase_envoy/manifest.json b/homeassistant/components/enphase_envoy/manifest.json index b3c117556bf..f5d2778fc9d 100644 --- a/homeassistant/components/enphase_envoy/manifest.json +++ b/homeassistant/components/enphase_envoy/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/enphase_envoy", "iot_class": "local_polling", "loggers": ["pyenphase"], - "requirements": ["pyenphase==1.20.3"], + "requirements": ["pyenphase==1.20.6"], "zeroconf": [ { "type": "_enphase-envoy._tcp.local." 
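The climate/__init__.py hunk above short-circuits the legacy turn_on/turn_off compatibility checks with a single bitwise test against the combined feature mask. A minimal sketch of that test, using a simplified stand-in for ClimateEntityFeature (the real flag values differ), assuming nothing beyond the hunk itself:

from enum import IntFlag


class Feature(IntFlag):
    # Simplified stand-in for ClimateEntityFeature; real values differ.
    TURN_ON = 1
    TURN_OFF = 2
    FAN_MODE = 4


def skip_legacy_compat(supported: Feature) -> bool:
    # Mirrors the early return: truthy as soon as a new on/off flag is present,
    # so the backwards-compatibility checks below it never run.
    return bool(supported & (Feature.TURN_ON | Feature.TURN_OFF))


assert skip_legacy_compat(Feature.FAN_MODE) is False
assert skip_legacy_compat(Feature.FAN_MODE | Feature.TURN_ON | Feature.TURN_OFF) is True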
diff --git a/homeassistant/components/feedreader/config_flow.py b/homeassistant/components/feedreader/config_flow.py index 6fa153b8177..d367432ff8c 100644 --- a/homeassistant/components/feedreader/config_flow.py +++ b/homeassistant/components/feedreader/config_flow.py @@ -107,13 +107,6 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): return self.abort_on_import_error(user_input[CONF_URL], "url_error") return self.show_user_form(user_input, {"base": "url_error"}) - if not feed.entries: - if self.context["source"] == SOURCE_IMPORT: - return self.abort_on_import_error( - user_input[CONF_URL], "no_feed_entries" - ) - return self.show_user_form(user_input, {"base": "no_feed_entries"}) - feed_title = feed["feed"]["title"] return self.async_create_entry( @@ -161,13 +154,6 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): step_id="reconfigure_confirm", errors={"base": "url_error"}, ) - if not feed.entries: - return self.show_user_form( - user_input=user_input, - description_placeholders={"name": self._config_entry.title}, - step_id="reconfigure_confirm", - errors={"base": "no_feed_entries"}, - ) self.hass.config_entries.async_update_entry(self._config_entry, data=user_input) return self.async_abort(reason="reconfigure_successful") diff --git a/homeassistant/components/feedreader/strings.json b/homeassistant/components/feedreader/strings.json index 31881b4112a..da66333fa5b 100644 --- a/homeassistant/components/feedreader/strings.json +++ b/homeassistant/components/feedreader/strings.json @@ -18,8 +18,7 @@ "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { - "url_error": "The URL could not be opened.", - "no_feed_entries": "The URL seems not to serve any feed entries." + "url_error": "The URL could not be opened." } }, "options": { @@ -38,10 +37,6 @@ "import_yaml_error_url_error": { "title": "The Feedreader YAML configuration import failed", "description": "Configuring the Feedreader using YAML is being removed but there was a connection error when trying to import the YAML configuration for `{url}`.\n\nPlease verify that url is reachable and accessable for Home Assistant and restart Home Assistant to try again or remove the Feedreader YAML configuration from your configuration.yaml file and continue to set up the integration manually." - }, - "import_yaml_error_no_feed_entries": { - "title": "[%key:component::feedreader::issues::import_yaml_error_url_error::title%]", - "description": "Configuring the Feedreader using YAML is being removed but when trying to import the YAML configuration for `{url}` no feed entries were found.\n\nPlease verify that url serves any feed entries and restart Home Assistant to try again or remove the Feedreader YAML configuration from your configuration.yaml file and continue to set up the integration manually." 
} } } diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 186f725c643..57de177da9c 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240705.0"] + "requirements": ["home-assistant-frontend==20240710.0"] } diff --git a/homeassistant/components/fully_kiosk/camera.py b/homeassistant/components/fully_kiosk/camera.py index 99419271c26..d55875e094f 100644 --- a/homeassistant/components/fully_kiosk/camera.py +++ b/homeassistant/components/fully_kiosk/camera.py @@ -2,9 +2,12 @@ from __future__ import annotations +from fullykiosk import FullyKioskError + from homeassistant.components.camera import Camera, CameraEntityFeature from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN @@ -36,8 +39,12 @@ class FullyCameraEntity(FullyKioskEntity, Camera): self, width: int | None = None, height: int | None = None ) -> bytes | None: """Return bytes of camera image.""" - image_bytes: bytes = await self.coordinator.fully.getCamshot() - return image_bytes + try: + image_bytes: bytes = await self.coordinator.fully.getCamshot() + except FullyKioskError as err: + raise HomeAssistantError(err) from err + else: + return image_bytes async def async_turn_on(self) -> None: """Turn on camera.""" diff --git a/homeassistant/components/homematic/sensor.py b/homeassistant/components/homematic/sensor.py index eebcad95446..b33a725db0f 100644 --- a/homeassistant/components/homematic/sensor.py +++ b/homeassistant/components/homematic/sensor.py @@ -156,7 +156,6 @@ SENSOR_DESCRIPTIONS: dict[str, SensorEntityDescription] = { key="GAS_POWER", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, device_class=SensorDeviceClass.GAS, - state_class=SensorStateClass.MEASUREMENT, ), "GAS_ENERGY_COUNTER": SensorEntityDescription( key="GAS_ENERGY_COUNTER", diff --git a/homeassistant/components/icloud/strings.json b/homeassistant/components/icloud/strings.json index 96db11d4656..22c711e919a 100644 --- a/homeassistant/components/icloud/strings.json +++ b/homeassistant/components/icloud/strings.json @@ -6,7 +6,7 @@ "description": "Enter your credentials", "data": { "username": "[%key:common::config_flow::data::email%]", - "password": "[%key:common::config_flow::data::password%]", + "password": "App-specific password", "with_family": "With family" } }, @@ -14,7 +14,7 @@ "title": "[%key:common::config_flow::title::reauth%]", "description": "Your previously entered password for {username} is no longer working. 
Update your password to keep using this integration.", "data": { - "password": "[%key:common::config_flow::data::password%]" + "password": "App-specific password" } }, "trusted_device": { diff --git a/homeassistant/components/kodi/media_player.py b/homeassistant/components/kodi/media_player.py index 290b3b1e566..46dee891e3a 100644 --- a/homeassistant/components/kodi/media_player.py +++ b/homeassistant/components/kodi/media_player.py @@ -641,12 +641,10 @@ class KodiEntity(MediaPlayerEntity): if self.state == MediaPlayerState.OFF: return state_attr - hdr_type = ( - self._item.get("streamdetails", {}).get("video", [{}])[0].get("hdrtype") - ) - if hdr_type == "": - state_attr["dynamic_range"] = "sdr" - else: + state_attr["dynamic_range"] = "sdr" + if (video_details := self._item.get("streamdetails", {}).get("video")) and ( + hdr_type := video_details[0].get("hdrtype") + ): state_attr["dynamic_range"] = hdr_type return state_attr diff --git a/homeassistant/components/matter/binary_sensor.py b/homeassistant/components/matter/binary_sensor.py index b71c35c9cce..a6d68682e9d 100644 --- a/homeassistant/components/matter/binary_sensor.py +++ b/homeassistant/components/matter/binary_sensor.py @@ -145,4 +145,20 @@ DISCOVERY_SCHEMAS = [ required_attributes=(clusters.BooleanState.Attributes.StateValue,), device_type=(device_types.RainSensor,), ), + MatterDiscoverySchema( + platform=Platform.BINARY_SENSOR, + entity_description=MatterBinarySensorEntityDescription( + key="LockDoorStateSensor", + device_class=BinarySensorDeviceClass.DOOR, + # pylint: disable=unnecessary-lambda + measurement_to_ha=lambda x: { + clusters.DoorLock.Enums.DoorStateEnum.kDoorOpen: True, + clusters.DoorLock.Enums.DoorStateEnum.kDoorJammed: True, + clusters.DoorLock.Enums.DoorStateEnum.kDoorForcedOpen: True, + clusters.DoorLock.Enums.DoorStateEnum.kDoorClosed: False, + }.get(x), + ), + entity_class=MatterBinarySensor, + required_attributes=(clusters.DoorLock.Attributes.DoorState,), + ), ] diff --git a/homeassistant/components/matter/lock.py b/homeassistant/components/matter/lock.py index 1cc85fa897e..ae01faa3bc7 100644 --- a/homeassistant/components/matter/lock.py +++ b/homeassistant/components/matter/lock.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from typing import Any from chip.clusters import Objects as clusters @@ -38,6 +39,7 @@ class MatterLock(MatterEntity, LockEntity): """Representation of a Matter lock.""" features: int | None = None + _optimistic_timer: asyncio.TimerHandle | None = None @property def code_format(self) -> str | None: @@ -90,9 +92,15 @@ class MatterLock(MatterEntity, LockEntity): async def async_lock(self, **kwargs: Any) -> None: """Lock the lock with pin if needed.""" - # optimistically signal locking to state machine - self._attr_is_locking = True - self.async_write_ha_state() + if not self._attr_is_locked: + # optimistically signal locking to state machine + self._attr_is_locking = True + self.async_write_ha_state() + # the lock should acknowledge the command with an attribute update + # but bad things may happen, so guard against it with a timer. 
+ self._optimistic_timer = self.hass.loop.call_later( + 30, self._reset_optimistic_state + ) code: str | None = kwargs.get(ATTR_CODE) code_bytes = code.encode() if code else None await self.send_device_command( @@ -101,9 +109,15 @@ class MatterLock(MatterEntity, LockEntity): async def async_unlock(self, **kwargs: Any) -> None: """Unlock the lock with pin if needed.""" - # optimistically signal unlocking to state machine - self._attr_is_unlocking = True - self.async_write_ha_state() + if self._attr_is_locked: + # optimistically signal unlocking to state machine + self._attr_is_unlocking = True + self.async_write_ha_state() + # the lock should acknowledge the command with an attribute update + # but bad things may happen, so guard against it with a timer. + self._optimistic_timer = self.hass.loop.call_later( + 30, self._reset_optimistic_state + ) code: str | None = kwargs.get(ATTR_CODE) code_bytes = code.encode() if code else None if self.supports_unbolt: @@ -120,9 +134,14 @@ class MatterLock(MatterEntity, LockEntity): async def async_open(self, **kwargs: Any) -> None: """Open the door latch.""" - # optimistically signal unlocking to state machine - self._attr_is_unlocking = True + # optimistically signal opening to state machine + self._attr_is_opening = True self.async_write_ha_state() + # the lock should acknowledge the command with an attribute update + # but bad things may happen, so guard against it with a timer. + self._optimistic_timer = self.hass.loop.call_later( + 30 if self._attr_is_locked else 5, self._reset_optimistic_state + ) code: str | None = kwargs.get(ATTR_CODE) code_bytes = code.encode() if code else None await self.send_device_command( @@ -145,38 +164,38 @@ class MatterLock(MatterEntity, LockEntity): ) # always reset the optimisically (un)locking state on state update - self._attr_is_locking = False - self._attr_is_unlocking = False + self._reset_optimistic_state(write_state=False) LOGGER.debug("Lock state: %s for %s", lock_state, self.entity_id) + if lock_state is clusters.DoorLock.Enums.DlLockState.kUnlatched: + self._attr_is_locked = False + self._attr_is_open = True if lock_state is clusters.DoorLock.Enums.DlLockState.kLocked: self._attr_is_locked = True + self._attr_is_open = False elif lock_state in ( clusters.DoorLock.Enums.DlLockState.kUnlocked, - clusters.DoorLock.Enums.DlLockState.kUnlatched, clusters.DoorLock.Enums.DlLockState.kNotFullyLocked, ): self._attr_is_locked = False + self._attr_is_open = False else: - # According to the matter docs a null state can happen during device startup. + # Treat any other state as unknown. + # NOTE: A null state can happen during device startup. 
self._attr_is_locked = None + self._attr_is_open = None - if self.supports_door_position_sensor: - door_state = self.get_matter_attribute_value( - clusters.DoorLock.Attributes.DoorState - ) - - assert door_state is not None - - LOGGER.debug("Door state: %s for %s", door_state, self.entity_id) - - self._attr_is_jammed = ( - door_state is clusters.DoorLock.Enums.DoorStateEnum.kDoorJammed - ) - self._attr_is_open = ( - door_state is clusters.DoorLock.Enums.DoorStateEnum.kDoorOpen - ) + @callback + def _reset_optimistic_state(self, write_state: bool = True) -> None: + if self._optimistic_timer and not self._optimistic_timer.cancelled(): + self._optimistic_timer.cancel() + self._optimistic_timer = None + self._attr_is_locking = False + self._attr_is_unlocking = False + self._attr_is_opening = False + if write_state: + self.async_write_ha_state() DISCOVERY_SCHEMAS = [ diff --git a/homeassistant/components/mealie/calendar.py b/homeassistant/components/mealie/calendar.py index fb628754f06..1b1c14c2ca0 100644 --- a/homeassistant/components/mealie/calendar.py +++ b/homeassistant/components/mealie/calendar.py @@ -60,7 +60,8 @@ class MealieMealplanCalendarEntity(MealieEntity, CalendarEntity): mealplans = self.coordinator.data[self._entry_type] if not mealplans: return None - return _get_event_from_mealplan(mealplans[0]) + sorted_mealplans = sorted(mealplans, key=lambda x: x.mealplan_date) + return _get_event_from_mealplan(sorted_mealplans[0]) async def async_get_events( self, hass: HomeAssistant, start_date: datetime, end_date: datetime diff --git a/homeassistant/components/mealie/strings.json b/homeassistant/components/mealie/strings.json index 0d67bb89759..7a89fb85128 100644 --- a/homeassistant/components/mealie/strings.json +++ b/homeassistant/components/mealie/strings.json @@ -3,8 +3,11 @@ "step": { "user": { "data": { - "host": "[%key:common::config_flow::data::host%]", + "host": "[%key:common::config_flow::data::url%]", "api_token": "[%key:common::config_flow::data::api_token%]" + }, + "data_description": { + "host": "The URL of your Mealie instance." 
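The matter/lock.py hunks above pair an optimistic state write with a watchdog timer: the transient locking/unlocking/opening flag is set immediately, a call_later handle is stored, and _reset_optimistic_state clears both when the device confirms via an attribute update or when the timer fires first. A stripped-down sketch of the same pattern with plain asyncio (class and method names here are illustrative, not Home Assistant's API):

import asyncio


class OptimisticLock:
    # Illustrative only: optimistic transition state guarded by a fallback timer.

    def __init__(self) -> None:
        self.is_locked: bool | None = None
        self.is_locking = False
        self._timer: asyncio.TimerHandle | None = None

    async def lock(self) -> None:
        # Optimistically report "locking" and arm a fallback that clears the
        # transient state if no confirmation ever arrives.
        self.is_locking = True
        self._timer = asyncio.get_running_loop().call_later(30, self._reset)
        await self._send_command("lock")

    def on_state_update(self, locked: bool) -> None:
        # Confirmation from the device: always drop the optimistic state first.
        self._reset()
        self.is_locked = locked

    def _reset(self) -> None:
        if self._timer and not self._timer.cancelled():
            self._timer.cancel()
        self._timer = None
        self.is_locking = False

    async def _send_command(self, command: str) -> None:
        ...  # stand-in for the actual device command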
} } }, diff --git a/homeassistant/components/mobile_app/webhook.py b/homeassistant/components/mobile_app/webhook.py index e93b4c5ea99..125e4d27247 100644 --- a/homeassistant/components/mobile_app/webhook.py +++ b/homeassistant/components/mobile_app/webhook.py @@ -721,10 +721,15 @@ async def webhook_get_config( """Handle a get config webhook.""" hass_config = hass.config.as_dict() + device: dr.DeviceEntry = hass.data[DOMAIN][DATA_DEVICES][ + config_entry.data[CONF_WEBHOOK_ID] + ] + resp = { "latitude": hass_config["latitude"], "longitude": hass_config["longitude"], "elevation": hass_config["elevation"], + "hass_device_id": device.id, "unit_system": hass_config["unit_system"], "location_name": hass_config["location_name"], "time_zone": hass_config["time_zone"], diff --git a/homeassistant/components/modbus/manifest.json b/homeassistant/components/modbus/manifest.json index 5635adc9392..292a2ee86a8 100644 --- a/homeassistant/components/modbus/manifest.json +++ b/homeassistant/components/modbus/manifest.json @@ -6,5 +6,5 @@ "iot_class": "local_polling", "loggers": ["pymodbus"], "quality_scale": "platinum", - "requirements": ["pymodbus==3.6.8"] + "requirements": ["pymodbus==3.6.9"] } diff --git a/homeassistant/components/mpd/manifest.json b/homeassistant/components/mpd/manifest.json index e03005fb95a..a361152670a 100644 --- a/homeassistant/components/mpd/manifest.json +++ b/homeassistant/components/mpd/manifest.json @@ -2,6 +2,7 @@ "domain": "mpd", "name": "Music Player Daemon (MPD)", "codeowners": [], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/mpd", "iot_class": "local_polling", "loggers": ["mpd"], diff --git a/homeassistant/components/ombi/__init__.py b/homeassistant/components/ombi/__init__.py index a4cbe39f3e0..d63f72592f8 100644 --- a/homeassistant/components/ombi/__init__.py +++ b/homeassistant/components/ombi/__init__.py @@ -73,7 +73,7 @@ CONFIG_SCHEMA = vol.Schema( vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean, } ), - cv.has_at_least_one_key("auth"), + cv.has_at_least_one_key(CONF_API_KEY, CONF_PASSWORD), ) }, extra=vol.ALLOW_EXTRA, diff --git a/homeassistant/components/philips_js/light.py b/homeassistant/components/philips_js/light.py index 8e500592704..1d63b2062e6 100644 --- a/homeassistant/components/philips_js/light.py +++ b/homeassistant/components/philips_js/light.py @@ -385,6 +385,6 @@ class PhilipsTVLightEntity(PhilipsJsEntity, LightEntity): """Return true if entity is available.""" if not super().available: return False - if not self.coordinator.api.on: + if not self._tv.on: return False - return self.coordinator.api.powerstate == "On" + return True diff --git a/homeassistant/components/rdw/manifest.json b/homeassistant/components/rdw/manifest.json index f44dc7e0f12..7af3e861347 100644 --- a/homeassistant/components/rdw/manifest.json +++ b/homeassistant/components/rdw/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["vehicle==2.2.1"] + "requirements": ["vehicle==2.2.2"] } diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 561b446f493..cf003f72af4 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -24,7 +24,7 @@ from sqlalchemy.exc import ( SQLAlchemyError, ) from sqlalchemy.orm.session import Session -from sqlalchemy.schema import AddConstraint, DropConstraint +from sqlalchemy.schema import AddConstraint, 
CreateTable, DropConstraint from sqlalchemy.sql.expression import true from sqlalchemy.sql.lambdas import StatementLambdaElement @@ -1738,14 +1738,15 @@ def cleanup_legacy_states_event_ids(instance: Recorder) -> bool: # Only drop the index if there are no more event_ids in the states table # ex all NULL assert instance.engine is not None, "engine should never be None" - if instance.dialect_name != SupportedDialect.SQLITE: + if instance.dialect_name == SupportedDialect.SQLITE: # SQLite does not support dropping foreign key constraints - # so we can't drop the index at this time but we can avoid - # looking for legacy rows during purge + # so we have to rebuild the table + rebuild_sqlite_table(session_maker, instance.engine, States) + else: _drop_foreign_key_constraints( session_maker, instance.engine, TABLE_STATES, ["event_id"] ) - _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) + _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) instance.use_legacy_events_index = False return True @@ -1894,3 +1895,68 @@ def _mark_migration_done( migration_id=migration.migration_id, version=migration.migration_version ) ) + + +def rebuild_sqlite_table( + session_maker: Callable[[], Session], engine: Engine, table: type[Base] +) -> None: + """Rebuild an SQLite table. + + This must only be called after all migrations are complete + and the database is in a consistent state. + + If the table is not migrated to the current schema this + will likely fail. + """ + table_table = cast(Table, table.__table__) + orig_name = table_table.name + temp_name = f"{table_table.name}_temp_{int(time())}" + + _LOGGER.warning( + "Rebuilding SQLite table %s; This will take a while; Please be patient!", + orig_name, + ) + + try: + # 12 step SQLite table rebuild + # https://www.sqlite.org/lang_altertable.html + with session_scope(session=session_maker()) as session: + # Step 1 - Disable foreign keys + session.connection().execute(text("PRAGMA foreign_keys=OFF")) + # Step 2 - create a transaction + with session_scope(session=session_maker()) as session: + # Step 3 - we know all the indexes, triggers, and views associated with table X + new_sql = str(CreateTable(table_table).compile(engine)).strip("\n") + ";" + source_sql = f"CREATE TABLE {orig_name}" + replacement_sql = f"CREATE TABLE {temp_name}" + assert source_sql in new_sql, f"{source_sql} should be in new_sql" + new_sql = new_sql.replace(source_sql, replacement_sql) + # Step 4 - Create temp table + session.execute(text(new_sql)) + column_names = ",".join([column.name for column in table_table.columns]) + # Step 5 - Transfer content + sql = f"INSERT INTO {temp_name} SELECT {column_names} FROM {orig_name};" # noqa: S608 + session.execute(text(sql)) + # Step 6 - Drop the original table + session.execute(text(f"DROP TABLE {orig_name}")) + # Step 7 - Rename the temp table + session.execute(text(f"ALTER TABLE {temp_name} RENAME TO {orig_name}")) + # Step 8 - Recreate indexes + for index in table_table.indexes: + index.create(session.connection()) + # Step 9 - Recreate views (there are none) + # Step 10 - Check foreign keys + session.execute(text("PRAGMA foreign_key_check")) + # Step 11 - Commit transaction + session.commit() + except SQLAlchemyError: + _LOGGER.exception("Error recreating SQLite table %s", table_table.name) + # Swallow the exception since we do not want to ever raise + # an integrity error as it would cause the database + # to be discarded and recreated from scratch + else: + _LOGGER.warning("Rebuilding SQLite table %s finished", 
orig_name) + finally: + with session_scope(session=session_maker()) as session: + # Step 12 - Re-enable foreign keys + session.connection().execute(text("PRAGMA foreign_keys=ON")) diff --git a/homeassistant/components/smhi/weather.py b/homeassistant/components/smhi/weather.py index 3d5642a2784..aac4c5d24be 100644 --- a/homeassistant/components/smhi/weather.py +++ b/homeassistant/components/smhi/weather.py @@ -218,7 +218,9 @@ class SmhiWeather(WeatherEntity): data.append( { - ATTR_FORECAST_TIME: forecast.valid_time.isoformat(), + ATTR_FORECAST_TIME: forecast.valid_time.replace( + tzinfo=dt_util.UTC + ).isoformat(), ATTR_FORECAST_NATIVE_TEMP: forecast.temperature_max, ATTR_FORECAST_NATIVE_TEMP_LOW: forecast.temperature_min, ATTR_FORECAST_NATIVE_PRECIPITATION: forecast.total_precipitation, diff --git a/homeassistant/components/systemmonitor/manifest.json b/homeassistant/components/systemmonitor/manifest.json index 5e1ef6c02de..236f25bb1ed 100644 --- a/homeassistant/components/systemmonitor/manifest.json +++ b/homeassistant/components/systemmonitor/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/systemmonitor", "iot_class": "local_push", "loggers": ["psutil"], - "requirements": ["psutil-home-assistant==0.0.1", "psutil==5.9.8"] + "requirements": ["psutil-home-assistant==0.0.1", "psutil==6.0.0"] } diff --git a/homeassistant/components/tailscale/manifest.json b/homeassistant/components/tailscale/manifest.json index 14f4206f44f..24f485fcdbd 100644 --- a/homeassistant/components/tailscale/manifest.json +++ b/homeassistant/components/tailscale/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["tailscale==0.6.0"] + "requirements": ["tailscale==0.6.1"] } diff --git a/homeassistant/components/tessie/entity.py b/homeassistant/components/tessie/entity.py index d2a59f205fc..4c077ce19db 100644 --- a/homeassistant/components/tessie/entity.py +++ b/homeassistant/components/tessie/entity.py @@ -42,6 +42,7 @@ class TessieBaseEntity( self.key = key self._attr_translation_key = key super().__init__(coordinator) + self._async_update_attrs() @property def _value(self) -> Any: @@ -132,7 +133,6 @@ class TessieEnergyEntity(TessieBaseEntity): self._attr_device_info = data.device super().__init__(coordinator, key) - self._async_update_attrs() class TessieWallConnectorEntity(TessieBaseEntity): diff --git a/homeassistant/components/unifiprotect/binary_sensor.py b/homeassistant/components/unifiprotect/binary_sensor.py index c4e1aa87df2..75156308b1a 100644 --- a/homeassistant/components/unifiprotect/binary_sensor.py +++ b/homeassistant/components/unifiprotect/binary_sensor.py @@ -284,7 +284,7 @@ CAMERA_SENSORS: tuple[ProtectBinaryEntityDescription, ...] = ( name="Tracking: person", icon="mdi:walk", entity_category=EntityCategory.DIAGNOSTIC, - ufp_required_field="is_ptz", + ufp_required_field="feature_flags.is_ptz", ufp_value="is_person_tracking_enabled", ufp_perm=PermRequired.NO_WRITE, ), diff --git a/homeassistant/components/unifiprotect/switch.py b/homeassistant/components/unifiprotect/switch.py index ca56a602209..50372d47ea8 100644 --- a/homeassistant/components/unifiprotect/switch.py +++ b/homeassistant/components/unifiprotect/switch.py @@ -319,7 +319,7 @@ CAMERA_SWITCHES: tuple[ProtectSwitchEntityDescription, ...] 
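Because SQLite cannot drop a foreign key constraint in place, the new rebuild_sqlite_table helper above walks the 12-step table-rebuild procedure from https://www.sqlite.org/lang_altertable.html through SQLAlchemy. A compressed, self-contained illustration of the same procedure with the stdlib sqlite3 module and a throwaway schema (table and column names here are only an example):

import sqlite3
import time

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE events (event_id INTEGER PRIMARY KEY)")
conn.execute(
    "CREATE TABLE states (state_id INTEGER PRIMARY KEY, state TEXT,"
    " event_id INTEGER REFERENCES events(event_id))"
)
conn.execute("INSERT INTO states (state, event_id) VALUES ('on', NULL)")

temp = f"states_temp_{int(time.time())}"
conn.execute("PRAGMA foreign_keys=OFF")  # step 1: disable FK enforcement
# step 4: same columns, but the new definition no longer carries the FK clause
conn.execute(f"CREATE TABLE {temp} (state_id INTEGER PRIMARY KEY, state TEXT, event_id INTEGER)")
conn.execute(f"INSERT INTO {temp} SELECT state_id, state, event_id FROM states")  # step 5: copy
conn.execute("DROP TABLE states")  # step 6: drop the original
conn.execute(f"ALTER TABLE {temp} RENAME TO states")  # step 7: rename into place
conn.execute("PRAGMA foreign_key_check")  # step 10: sanity check
conn.commit()  # step 11: commit
conn.execute("PRAGMA foreign_keys=ON")  # step 12: re-enable enforcement
print(conn.execute("SELECT state FROM states").fetchall())  # [('on',)]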
= ( name="Tracking: person", icon="mdi:walk", entity_category=EntityCategory.CONFIG, - ufp_required_field="is_ptz", + ufp_required_field="feature_flags.is_ptz", ufp_value="is_person_tracking_enabled", ufp_set_method="set_person_track", ufp_perm=PermRequired.WRITE, diff --git a/homeassistant/components/upb/config_flow.py b/homeassistant/components/upb/config_flow.py index 1db0b0b6fe3..40f49e57c60 100644 --- a/homeassistant/components/upb/config_flow.py +++ b/homeassistant/components/upb/config_flow.py @@ -39,12 +39,13 @@ async def _validate_input(data): url = _make_url_from_data(data) upb = upb_lib.UpbPim({"url": url, "UPStartExportFile": file_path}) + + upb.connect(_connected_callback) + if not upb.config_ok: _LOGGER.error("Missing or invalid UPB file: %s", file_path) raise InvalidUpbFile - upb.connect(_connected_callback) - with suppress(TimeoutError): async with asyncio.timeout(VALIDATE_TIMEOUT): await connected_event.wait() diff --git a/homeassistant/components/velbus/cover.py b/homeassistant/components/velbus/cover.py index 823d682d339..8b9d927f3d7 100644 --- a/homeassistant/components/velbus/cover.py +++ b/homeassistant/components/velbus/cover.py @@ -66,12 +66,16 @@ class VelbusCover(VelbusEntity, CoverEntity): @property def is_opening(self) -> bool: """Return if the cover is opening.""" - return self._channel.is_opening() + if opening := self._channel.is_opening(): + self._assumed_closed = False + return opening @property def is_closing(self) -> bool: """Return if the cover is closing.""" - return self._channel.is_closing() + if closing := self._channel.is_closing(): + self._assumed_closed = True + return closing @property def current_cover_position(self) -> int | None: @@ -89,13 +93,11 @@ class VelbusCover(VelbusEntity, CoverEntity): async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" await self._channel.open() - self._assumed_closed = False @api_call async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" await self._channel.close() - self._assumed_closed = True @api_call async def async_stop_cover(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/xiaomi_ble/manifest.json b/homeassistant/components/xiaomi_ble/manifest.json index 1e0a09015ee..21e9bc45bb8 100644 --- a/homeassistant/components/xiaomi_ble/manifest.json +++ b/homeassistant/components/xiaomi_ble/manifest.json @@ -24,5 +24,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/xiaomi_ble", "iot_class": "local_push", - "requirements": ["xiaomi-ble==0.30.0"] + "requirements": ["xiaomi-ble==0.30.2"] } diff --git a/homeassistant/const.py b/homeassistant/const.py index 33087b0bfc1..8587f9e6137 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 7 -PATCH_VERSION: Final = "1" +PATCH_VERSION: Final = "2" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 463a38feb9f..0020dc91ccd 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -352,6 +352,7 @@ FLOWS = { "motionblinds_ble", "motioneye", "motionmount", + "mpd", "mqtt", "mullvad", "mutesync", diff --git 
a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 0ad8ac09c9e..a3db08e57c2 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -3814,7 +3814,7 @@ "mpd": { "name": "Music Player Daemon (MPD)", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "mqtt": { diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index ba307a785ac..506cadbf168 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -483,7 +483,7 @@ def _get_exposed_entities( if attributes := { attr_name: str(attr_value) - if isinstance(attr_value, (Enum, Decimal)) + if isinstance(attr_value, (Enum, Decimal, int)) else attr_value for attr_name, attr_value in state.attributes.items() if attr_name in interesting_attributes diff --git a/homeassistant/loader.py b/homeassistant/loader.py index 9afad610420..9acc1682602 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -102,6 +102,23 @@ BLOCKED_CUSTOM_INTEGRATIONS: dict[str, BlockedIntegration] = { "mydolphin_plus": BlockedIntegration( AwesomeVersion("1.0.13"), "crashes Home Assistant" ), + # Added in 2024.7.2 because of + # https://github.com/gcobb321/icloud3/issues/349 + # Note: Current version 3.0.5.2, the fixed version is a guesstimate, + # as no solution is available at time of writing. + "icloud3": BlockedIntegration( + AwesomeVersion("3.0.5.3"), "prevents recorder from working" + ), + # Added in 2024.7.2 because of + # https://github.com/custom-components/places/issues/289 + "places": BlockedIntegration( + AwesomeVersion("2.7.1"), "prevents recorder from working" + ), + # Added in 2024.7.2 because of + # https://github.com/enkama/hass-variables/issues/120 + "variable": BlockedIntegration( + AwesomeVersion("3.4.4"), "prevents recorder from working" + ), } DATA_COMPONENTS: HassKey[dict[str, ModuleType | ComponentProtocol]] = HassKey( diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 41d3af2ad47..6b43f288762 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -32,7 +32,7 @@ habluetooth==3.1.3 hass-nabucasa==0.81.1 hassil==1.7.1 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240705.0 +home-assistant-frontend==20240710.0 home-assistant-intents==2024.7.3 httpx==0.27.0 ifaddr==0.2.0 diff --git a/pyproject.toml b/pyproject.toml index 138d1dd80b8..82c29948e3c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.7.1" +version = "2024.7.2" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
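The helpers/llm.py hunk above widens the attribute serialization so plain int values are rendered as strings in the exposed-entity summary, the same way Enum and Decimal values already were (the matching test change at the end of this patch expects humidity 65 to come out as "65"). A tiny sketch of that comprehension with made-up attribute data:

from decimal import Decimal
from enum import Enum


class Mode(Enum):
    ECO = "eco"


attributes = {
    "temperature": Decimal("0.9"),
    "humidity": 65,
    "mode": Mode.ECO,
    "friendly_name": "Kitchen",
}
interesting_attributes = {"temperature", "humidity", "mode"}

serialized = {
    name: str(value) if isinstance(value, (Enum, Decimal, int)) else value
    for name, value in attributes.items()
    if name in interesting_attributes
}
# {'temperature': '0.9', 'humidity': '65', 'mode': 'Mode.ECO'}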
readme = "README.rst" diff --git a/requirements_all.txt b/requirements_all.txt index f1d77644104..38f8b6a44cb 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -449,7 +449,7 @@ androidtvremote2==0.1.1 anel-pwrctrl-homeassistant==0.0.1.dev2 # homeassistant.components.anova -anova-wifi==0.15.0 +anova-wifi==0.17.0 # homeassistant.components.anthemav anthemav==1.4.1 @@ -1090,7 +1090,7 @@ hole==0.8.0 holidays==0.52 # homeassistant.components.frontend -home-assistant-frontend==20240705.0 +home-assistant-frontend==20240710.0 # homeassistant.components.conversation home-assistant-intents==2024.7.3 @@ -1179,7 +1179,7 @@ isal==1.6.1 ismartgate==5.0.1 # homeassistant.components.abode -jaraco.abode==5.1.2 +jaraco.abode==5.2.1 # homeassistant.components.jellyfin jellyfin-apiclient-python==1.9.2 @@ -1598,7 +1598,7 @@ proxmoxer==2.0.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==5.9.8 +psutil==6.0.0 # homeassistant.components.pulseaudio_loopback pulsectl==23.5.2 @@ -1827,7 +1827,7 @@ pyeiscp==0.0.7 pyemoncms==0.0.7 # homeassistant.components.enphase_envoy -pyenphase==1.20.3 +pyenphase==1.20.6 # homeassistant.components.envisalink pyenvisalink==4.7 @@ -2004,7 +2004,7 @@ pymitv==1.4.3 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.8 +pymodbus==3.6.9 # homeassistant.components.monoprice pymonoprice==0.4 @@ -2683,7 +2683,7 @@ systembridgeconnector==4.0.3 systembridgemodels==4.0.4 # homeassistant.components.tailscale -tailscale==0.6.0 +tailscale==0.6.1 # homeassistant.components.tank_utility tank-utility==1.5.0 @@ -2827,7 +2827,7 @@ vacuum-map-parser-roborock==0.1.2 vallox-websocket-api==5.3.0 # homeassistant.components.rdw -vehicle==2.2.1 +vehicle==2.2.2 # homeassistant.components.velbus velbus-aio==2024.7.5 @@ -2906,7 +2906,7 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.30.0 +xiaomi-ble==0.30.2 # homeassistant.components.knx xknx==2.12.2 @@ -2933,7 +2933,7 @@ yalesmartalarmclient==0.3.9 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==6.4.1 +yalexs==6.4.2 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 90e59778751..eb46f1e9c40 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -413,7 +413,7 @@ androidtv[async]==0.0.73 androidtvremote2==0.1.1 # homeassistant.components.anova -anova-wifi==0.15.0 +anova-wifi==0.17.0 # homeassistant.components.anthemav anthemav==1.4.1 @@ -895,7 +895,7 @@ hole==0.8.0 holidays==0.52 # homeassistant.components.frontend -home-assistant-frontend==20240705.0 +home-assistant-frontend==20240710.0 # homeassistant.components.conversation home-assistant-intents==2024.7.3 @@ -966,7 +966,7 @@ isal==1.6.1 ismartgate==5.0.1 # homeassistant.components.abode -jaraco.abode==5.1.2 +jaraco.abode==5.2.1 # homeassistant.components.jellyfin jellyfin-apiclient-python==1.9.2 @@ -1275,7 +1275,7 @@ prometheus-client==0.17.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==5.9.8 +psutil==6.0.0 # homeassistant.components.androidtv pure-python-adb[async]==0.3.0.dev0 @@ -1435,7 +1435,7 @@ pyefergy==22.5.0 pyegps==0.2.5 # homeassistant.components.enphase_envoy -pyenphase==1.20.3 +pyenphase==1.20.6 # homeassistant.components.everlights pyeverlights==0.1.0 @@ -1576,7 +1576,7 @@ pymeteoclimatic==0.1.0 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.8 +pymodbus==3.6.9 # homeassistant.components.monoprice pymonoprice==0.4 @@ -2096,7 +2096,7 @@ 
systembridgeconnector==4.0.3 systembridgemodels==4.0.4 # homeassistant.components.tailscale -tailscale==0.6.0 +tailscale==0.6.1 # homeassistant.components.tellduslive tellduslive==0.10.11 @@ -2201,7 +2201,7 @@ vacuum-map-parser-roborock==0.1.2 vallox-websocket-api==5.3.0 # homeassistant.components.rdw -vehicle==2.2.1 +vehicle==2.2.2 # homeassistant.components.velbus velbus-aio==2024.7.5 @@ -2268,7 +2268,7 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.30.0 +xiaomi-ble==0.30.2 # homeassistant.components.knx xknx==2.12.2 @@ -2292,7 +2292,7 @@ yalesmartalarmclient==0.3.9 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==6.4.1 +yalexs==6.4.2 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/tests/components/august/fixtures/get_lock.low_keypad_battery.json b/tests/components/august/fixtures/get_lock.low_keypad_battery.json index 08bdfaa76ed..43b5513a527 100644 --- a/tests/components/august/fixtures/get_lock.low_keypad_battery.json +++ b/tests/components/august/fixtures/get_lock.low_keypad_battery.json @@ -36,7 +36,7 @@ "currentFirmwareVersion": "2.27.0", "battery": {}, "batteryLevel": "Low", - "batteryRaw": 170 + "batteryRaw": 128 }, "OfflineKeys": { "created": [], diff --git a/tests/components/august/test_sensor.py b/tests/components/august/test_sensor.py index 0227ee64ef1..67223e9dff0 100644 --- a/tests/components/august/test_sensor.py +++ b/tests/components/august/test_sensor.py @@ -88,7 +88,7 @@ async def test_create_lock_with_linked_keypad( assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" state = hass.states.get("sensor.front_door_lock_keypad_battery") - assert state.state == "60" + assert state.state == "62" assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") assert entry diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py index a459b991203..4756c265aea 100644 --- a/tests/components/climate/test_init.py +++ b/tests/components/climate/test_init.py @@ -709,6 +709,68 @@ async def test_no_warning_integration_has_migrated( ) +async def test_no_warning_integration_implement_feature_flags( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None +) -> None: + """Test no warning when integration uses the correct feature flags.""" + + class MockClimateEntityTest(MockClimateEntity): + """Mock Climate device.""" + + _attr_supported_features = ( + ClimateEntityFeature.FAN_MODE + | ClimateEntityFeature.PRESET_MODE + | ClimateEntityFeature.SWING_MODE + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TURN_ON + ) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_climate_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test climate platform via config entry.""" + async_add_entities( + [MockClimateEntityTest(name="test", entity_id="climate.test")] + ) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.climate", + MockPlatform(async_setup_entry=async_setup_entry_climate_platform), + ) + + with patch.object( + MockClimateEntityTest, "__module__", 
"tests.custom_components.climate.test_init" + ): + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("climate.test") + assert state is not None + + assert "does not set ClimateEntityFeature" not in caplog.text + assert "implements HVACMode(s):" not in caplog.text + + async def test_turn_on_off_toggle(hass: HomeAssistant) -> None: """Test turn_on/turn_off/toggle methods.""" diff --git a/tests/components/feedreader/test_config_flow.py b/tests/components/feedreader/test_config_flow.py index 48c341492e0..669ca665f6b 100644 --- a/tests/components/feedreader/test_config_flow.py +++ b/tests/components/feedreader/test_config_flow.py @@ -83,16 +83,6 @@ async def test_user_errors( assert result["step_id"] == "user" assert result["errors"] == {"base": "url_error"} - # no feed entries returned - feedparser.side_effect = None - feedparser.return_value = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_URL: URL} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "no_feed_entries"} - # success feedparser.side_effect = None feedparser.return_value = feed_one_event @@ -141,40 +131,25 @@ async def test_import( assert issue_registry.async_get_issue(HA_DOMAIN, "deprecated_yaml_feedreader") -@pytest.mark.parametrize( - ("side_effect", "return_value", "expected_issue_id"), - [ - ( - urllib.error.URLError("Test"), - None, - "import_yaml_error_feedreader_url_error_http_some_rss_local_rss_feed_xml", - ), - ( - None, - None, - "import_yaml_error_feedreader_no_feed_entries_http_some_rss_local_rss_feed_xml", - ), - ], -) async def test_import_errors( hass: HomeAssistant, issue_registry: ir.IssueRegistry, feedparser, setup_entry, feed_one_event, - side_effect, - return_value, - expected_issue_id, ) -> None: """Test starting an import flow which results in an URL error.""" config_entries = hass.config_entries.async_entries(DOMAIN) assert not config_entries # raise URLError - feedparser.side_effect = side_effect - feedparser.return_value = return_value + feedparser.side_effect = urllib.error.URLError("Test") + feedparser.return_value = None assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_URLS: [URL]}}) - assert issue_registry.async_get_issue(DOMAIN, expected_issue_id) + assert issue_registry.async_get_issue( + DOMAIN, + "import_yaml_error_feedreader_url_error_http_some_rss_local_rss_feed_xml", + ) async def test_reconfigure(hass: HomeAssistant, feedparser) -> None: @@ -248,19 +223,6 @@ async def test_reconfigure_errors( assert result["step_id"] == "reconfigure_confirm" assert result["errors"] == {"base": "url_error"} - # no feed entries returned - feedparser.side_effect = None - feedparser.return_value = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_URL: "http://other.rss.local/rss_feed.xml", - }, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" - assert result["errors"] == {"base": "no_feed_entries"} - # success feedparser.side_effect = None feedparser.return_value = feed_one_event diff --git a/tests/components/fully_kiosk/test_camera.py b/tests/components/fully_kiosk/test_camera.py index 4e48749eebb..a2e7067ff1b 100644 --- a/tests/components/fully_kiosk/test_camera.py +++ 
b/tests/components/fully_kiosk/test_camera.py @@ -2,6 +2,7 @@ from unittest.mock import MagicMock +from fullykiosk import FullyKioskError import pytest from homeassistant.components.camera import async_get_image @@ -41,6 +42,12 @@ async def test_camera( assert mock_fully_kiosk.getCamshot.call_count == 1 assert image.content == b"image_bytes" + fully_kiosk_error = FullyKioskError("error", "status") + mock_fully_kiosk.getCamshot.side_effect = fully_kiosk_error + with pytest.raises(HomeAssistantError) as error: + await async_get_image(hass, entity_camera) + assert error.value.args[0] == fully_kiosk_error + mock_fully_kiosk.getSettings.return_value = {"motionDetection": False} await hass.services.async_call( "camera", diff --git a/tests/components/matter/fixtures/nodes/door-lock.json b/tests/components/matter/fixtures/nodes/door-lock.json index 8a3f0fd68dd..b6231e04af4 100644 --- a/tests/components/matter/fixtures/nodes/door-lock.json +++ b/tests/components/matter/fixtures/nodes/door-lock.json @@ -469,7 +469,7 @@ "1/47/65531": [ 0, 1, 2, 14, 15, 16, 19, 65528, 65529, 65530, 65531, 65532, 65533 ], - "1/257/0": 1, + "1/257/0": 0, "1/257/1": 0, "1/257/2": true, "1/257/3": 1, diff --git a/tests/components/matter/test_door_lock.py b/tests/components/matter/test_door_lock.py index 84f0e58a647..461cc1b7f3d 100644 --- a/tests/components/matter/test_door_lock.py +++ b/tests/components/matter/test_door_lock.py @@ -8,11 +8,10 @@ import pytest from homeassistant.components.lock import ( STATE_LOCKED, - STATE_OPEN, STATE_UNLOCKED, LockEntityFeature, ) -from homeassistant.const import ATTR_CODE, STATE_LOCKING, STATE_UNKNOWN +from homeassistant.const import ATTR_CODE, STATE_LOCKING, STATE_OPENING, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.entity_registry as er @@ -64,6 +63,7 @@ async def test_lock( ) matter_client.send_device_command.reset_mock() + await hass.async_block_till_done() state = hass.states.get("lock.mock_door_lock_lock") assert state assert state.state == STATE_LOCKING @@ -208,9 +208,14 @@ async def test_lock_with_unbolt( timed_request_timeout_ms=1000, ) + await hass.async_block_till_done() + state = hass.states.get("lock.mock_door_lock_lock") + assert state + assert state.state == STATE_OPENING + set_node_attribute(door_lock_with_unbolt, 1, 257, 3, 0) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == STATE_OPEN + assert state.state == STATE_LOCKED diff --git a/tests/components/mealie/snapshots/test_calendar.ambr b/tests/components/mealie/snapshots/test_calendar.ambr index c3b26e1e9e2..e5a0a697157 100644 --- a/tests/components/mealie/snapshots/test_calendar.ambr +++ b/tests/components/mealie/snapshots/test_calendar.ambr @@ -252,12 +252,12 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': True, - 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. 
Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", - 'end_time': '2024-01-23 00:00:00', + 'description': 'Dineren met de boys', + 'end_time': '2024-01-22 00:00:00', 'friendly_name': 'Mealie Dinner', 'location': '', - 'message': 'Zoete aardappel curry traybake', - 'start_time': '2024-01-22 00:00:00', + 'message': 'Aquavite', + 'start_time': '2024-01-21 00:00:00', }), 'context': , 'entity_id': 'calendar.mealie_dinner', @@ -304,12 +304,12 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': True, - 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. Ingredientes, tiempo de...', - 'end_time': '2024-01-24 00:00:00', + 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', + 'end_time': '2024-01-23 00:00:00', 'friendly_name': 'Mealie Lunch', 'location': '', - 'message': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', - 'start_time': '2024-01-23 00:00:00', + 'message': 'All-American Beef Stew Recipe', + 'start_time': '2024-01-22 00:00:00', }), 'context': , 'entity_id': 'calendar.mealie_lunch', diff --git a/tests/components/mobile_app/test_webhook.py b/tests/components/mobile_app/test_webhook.py index ca5c9936409..77798c57f10 100644 --- a/tests/components/mobile_app/test_webhook.py +++ b/tests/components/mobile_app/test_webhook.py @@ -10,7 +10,7 @@ from nacl.secret import SecretBox import pytest from homeassistant.components.camera import CameraEntityFeature -from homeassistant.components.mobile_app.const import CONF_SECRET, DOMAIN +from homeassistant.components.mobile_app.const import CONF_SECRET, DATA_DEVICES, DOMAIN from homeassistant.components.tag import EVENT_TAG_SCANNED from homeassistant.components.zone import DOMAIN as ZONE_DOMAIN from homeassistant.const import ( @@ -243,6 +243,7 @@ async def test_webhook_handle_get_config( """Test that we can get config properly.""" webhook_id = create_registrations[1]["webhook_id"] webhook_url = f"/api/webhook/{webhook_id}" + device: dr.DeviceEntry = hass.data[DOMAIN][DATA_DEVICES][webhook_id] # Create two entities for sensor in ( @@ -280,6 +281,7 @@ async def test_webhook_handle_get_config( "latitude": hass_config["latitude"], "longitude": hass_config["longitude"], "elevation": hass_config["elevation"], + "hass_device_id": device.id, "unit_system": hass_config["unit_system"], "location_name": hass_config["location_name"], "time_zone": hass_config["time_zone"], diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index a21f4771616..cb8e402f65a 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -16,7 +16,7 @@ from sqlalchemy.exc import ( ProgrammingError, SQLAlchemyError, ) -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session, scoped_session, sessionmaker from sqlalchemy.pool import StaticPool from homeassistant.bootstrap import async_setup_component @@ -24,6 +24,7 @@ from homeassistant.components import persistent_notification as pn, recorder from 
homeassistant.components.recorder import db_schema, migration from homeassistant.components.recorder.db_schema import ( SCHEMA_VERSION, + Events, RecorderRuns, States, ) @@ -633,3 +634,89 @@ def test_raise_if_exception_missing_empty_cause_str() -> None: with pytest.raises(ProgrammingError): migration.raise_if_exception_missing_str(programming_exc, ["not present"]) + + +def test_rebuild_sqlite_states_table(recorder_db_url: str) -> None: + """Test that we can rebuild the states table in SQLite.""" + if not recorder_db_url.startswith("sqlite://"): + # This test is specific for SQLite + return + + engine = create_engine(recorder_db_url) + session_maker = scoped_session(sessionmaker(bind=engine, future=True)) + with session_scope(session=session_maker()) as session: + db_schema.Base.metadata.create_all(engine) + with session_scope(session=session_maker()) as session: + session.add(States(state="on")) + session.commit() + + migration.rebuild_sqlite_table(session_maker, engine, States) + + with session_scope(session=session_maker()) as session: + assert session.query(States).count() == 1 + assert session.query(States).first().state == "on" + + engine.dispose() + + +def test_rebuild_sqlite_states_table_missing_fails( + recorder_db_url: str, caplog: pytest.LogCaptureFixture +) -> None: + """Test handling missing states table when attempting rebuild.""" + if not recorder_db_url.startswith("sqlite://"): + # This test is specific for SQLite + return + + engine = create_engine(recorder_db_url) + session_maker = scoped_session(sessionmaker(bind=engine, future=True)) + with session_scope(session=session_maker()) as session: + db_schema.Base.metadata.create_all(engine) + + with session_scope(session=session_maker()) as session: + session.add(Events(event_type="state_changed", event_data="{}")) + session.connection().execute(text("DROP TABLE states")) + session.commit() + + migration.rebuild_sqlite_table(session_maker, engine, States) + assert "Error recreating SQLite table states" in caplog.text + caplog.clear() + + # Now rebuild the events table to make sure the database did not + # get corrupted + migration.rebuild_sqlite_table(session_maker, engine, Events) + + with session_scope(session=session_maker()) as session: + assert session.query(Events).count() == 1 + assert session.query(Events).first().event_type == "state_changed" + assert session.query(Events).first().event_data == "{}" + + engine.dispose() + + +def test_rebuild_sqlite_states_table_extra_columns( + recorder_db_url: str, caplog: pytest.LogCaptureFixture +) -> None: + """Test handling extra columns when rebuilding the states table.""" + if not recorder_db_url.startswith("sqlite://"): + # This test is specific for SQLite + return + + engine = create_engine(recorder_db_url) + session_maker = scoped_session(sessionmaker(bind=engine, future=True)) + with session_scope(session=session_maker()) as session: + db_schema.Base.metadata.create_all(engine) + with session_scope(session=session_maker()) as session: + session.add(States(state="on")) + session.commit() + session.connection().execute( + text("ALTER TABLE states ADD COLUMN extra_column TEXT") + ) + + migration.rebuild_sqlite_table(session_maker, engine, States) + assert "Error recreating SQLite table states" not in caplog.text + + with session_scope(session=session_maker()) as session: + assert session.query(States).count() == 1 + assert session.query(States).first().state == "on" + + engine.dispose() diff --git a/tests/components/recorder/test_v32_migration.py 
b/tests/components/recorder/test_v32_migration.py index a07c63b3376..e3398fbf0e3 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -211,10 +211,9 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) - ) states_index_names = {index["name"] for index in states_indexes} - # sqlite does not support dropping foreign keys so the - # ix_states_event_id index is not dropped in this case - # but use_legacy_events_index is still False - assert "ix_states_event_id" in states_index_names + # sqlite does not support dropping foreign keys so we had to + # create a new table and copy the data over + assert "ix_states_event_id" not in states_index_names assert recorder.get_instance(hass).use_legacy_events_index is False @@ -342,8 +341,6 @@ async def test_migrate_can_resume_entity_id_post_migration( await hass.async_stop() await hass.async_block_till_done() - assert "ix_states_entity_id_last_updated_ts" in states_index_names - async with async_test_home_assistant() as hass: recorder_helper.async_initialize_recorder(hass) assert await async_setup_component( diff --git a/tests/components/smhi/snapshots/test_weather.ambr b/tests/components/smhi/snapshots/test_weather.ambr index 0d2f6b3b3bf..d825e22d470 100644 --- a/tests/components/smhi/snapshots/test_weather.ambr +++ b/tests/components/smhi/snapshots/test_weather.ambr @@ -6,7 +6,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'clear-night', - 'datetime': '2023-08-08T00:00:00', + 'datetime': '2023-08-08T00:00:00+00:00', 'humidity': 100, 'precipitation': 0.0, 'pressure': 992.0, @@ -19,7 +19,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'clear-night', - 'datetime': '2023-08-08T01:00:00', + 'datetime': '2023-08-08T01:00:00+00:00', 'humidity': 100, 'precipitation': 0.0, 'pressure': 992.0, @@ -32,7 +32,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'clear-night', - 'datetime': '2023-08-08T02:00:00', + 'datetime': '2023-08-08T02:00:00+00:00', 'humidity': 97, 'precipitation': 0.0, 'pressure': 992.0, @@ -45,7 +45,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'sunny', - 'datetime': '2023-08-08T03:00:00', + 'datetime': '2023-08-08T03:00:00+00:00', 'humidity': 96, 'precipitation': 0.0, 'pressure': 991.0, @@ -223,7 +223,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-07T12:00:00', + 'datetime': '2023-08-07T12:00:00+00:00', 'humidity': 96, 'precipitation': 0.0, 'pressure': 991.0, @@ -236,7 +236,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'rainy', - 'datetime': '2023-08-08T12:00:00', + 'datetime': '2023-08-08T12:00:00+00:00', 'humidity': 97, 'precipitation': 10.6, 'pressure': 984.0, @@ -249,7 +249,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'rainy', - 'datetime': '2023-08-09T12:00:00', + 'datetime': '2023-08-09T12:00:00+00:00', 'humidity': 95, 'precipitation': 6.3, 'pressure': 1001.0, @@ -262,7 +262,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-10T12:00:00', + 'datetime': '2023-08-10T12:00:00+00:00', 'humidity': 75, 'precipitation': 4.8, 'pressure': 1011.0, @@ -275,7 +275,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-11T12:00:00', + 'datetime': '2023-08-11T12:00:00+00:00', 'humidity': 69, 'precipitation': 0.6, 'pressure': 1015.0, @@ -288,7 +288,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-12T12:00:00', + 'datetime': '2023-08-12T12:00:00+00:00', 'humidity': 82, 'precipitation': 0.0, 'pressure': 1014.0, @@ -301,7 +301,7 @@ dict({ 
'cloud_coverage': 75, 'condition': 'partlycloudy', - 'datetime': '2023-08-13T12:00:00', + 'datetime': '2023-08-13T12:00:00+00:00', 'humidity': 59, 'precipitation': 0.0, 'pressure': 1013.0, @@ -314,7 +314,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'partlycloudy', - 'datetime': '2023-08-14T12:00:00', + 'datetime': '2023-08-14T12:00:00+00:00', 'humidity': 56, 'precipitation': 0.0, 'pressure': 1015.0, @@ -327,7 +327,7 @@ dict({ 'cloud_coverage': 88, 'condition': 'partlycloudy', - 'datetime': '2023-08-15T12:00:00', + 'datetime': '2023-08-15T12:00:00+00:00', 'humidity': 64, 'precipitation': 3.6, 'pressure': 1014.0, @@ -340,7 +340,7 @@ dict({ 'cloud_coverage': 75, 'condition': 'partlycloudy', - 'datetime': '2023-08-16T12:00:00', + 'datetime': '2023-08-16T12:00:00+00:00', 'humidity': 61, 'precipitation': 2.4, 'pressure': 1014.0, @@ -358,7 +358,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-07T12:00:00', + 'datetime': '2023-08-07T12:00:00+00:00', 'humidity': 96, 'precipitation': 0.0, 'pressure': 991.0, @@ -373,7 +373,7 @@ dict({ 'cloud_coverage': 75, 'condition': 'partlycloudy', - 'datetime': '2023-08-13T12:00:00', + 'datetime': '2023-08-13T12:00:00+00:00', 'humidity': 59, 'precipitation': 0.0, 'pressure': 1013.0, @@ -388,7 +388,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'fog', - 'datetime': '2023-08-07T09:00:00', + 'datetime': '2023-08-07T09:00:00+00:00', 'humidity': 100, 'precipitation': 0.0, 'pressure': 992.0, @@ -403,7 +403,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-07T15:00:00', + 'datetime': '2023-08-07T15:00:00+00:00', 'humidity': 89, 'precipitation': 0.0, 'pressure': 991.0, diff --git a/tests/components/systemmonitor/conftest.py b/tests/components/systemmonitor/conftest.py index e16debdf263..25611481433 100644 --- a/tests/components/systemmonitor/conftest.py +++ b/tests/components/systemmonitor/conftest.py @@ -174,11 +174,11 @@ def mock_psutil(mock_process: list[MockProcess]) -> Generator: "cpu0-thermal": [shwtemp("cpu0-thermal", 50.0, 60.0, 70.0)] } mock_psutil.disk_partitions.return_value = [ - sdiskpart("test", "/", "ext4", "", 1, 1), - sdiskpart("test2", "/media/share", "ext4", "", 1, 1), - sdiskpart("test3", "/incorrect", "", "", 1, 1), - sdiskpart("hosts", "/etc/hosts", "bind", "", 1, 1), - sdiskpart("proc", "/proc/run", "proc", "", 1, 1), + sdiskpart("test", "/", "ext4", ""), + sdiskpart("test2", "/media/share", "ext4", ""), + sdiskpart("test3", "/incorrect", "", ""), + sdiskpart("hosts", "/etc/hosts", "bind", ""), + sdiskpart("proc", "/proc/run", "proc", ""), ] mock_psutil.boot_time.return_value = 1708786800.0 mock_psutil.NoSuchProcess = NoSuchProcess diff --git a/tests/components/systemmonitor/test_util.py b/tests/components/systemmonitor/test_util.py index b35c7b2e96c..582707f3574 100644 --- a/tests/components/systemmonitor/test_util.py +++ b/tests/components/systemmonitor/test_util.py @@ -50,21 +50,19 @@ async def test_disk_util( """Test the disk failures.""" mock_psutil.psutil.disk_partitions.return_value = [ - sdiskpart("test", "/", "ext4", "", 1, 1), # Should be ok - sdiskpart("test2", "/media/share", "ext4", "", 1, 1), # Should be ok - sdiskpart("test3", "/incorrect", "", "", 1, 1), # Should be skipped as no type + sdiskpart("test", "/", "ext4", ""), # Should be ok + sdiskpart("test2", "/media/share", "ext4", ""), # Should be ok + sdiskpart("test3", "/incorrect", "", ""), # Should be skipped as no type sdiskpart( - "proc", "/proc/run", "proc", "", 1, 1 + "proc", "/proc/run", "proc", "" ), # 
Should be skipped as in skipped disk types sdiskpart( "test4", "/tmpfs/", # noqa: S108 "tmpfs", "", - 1, - 1, ), # Should be skipped as in skipped disk types - sdiskpart("test5", "E:", "cd", "cdrom", 1, 1), # Should be skipped as cdrom + sdiskpart("test5", "E:", "cd", "cdrom"), # Should be skipped as cdrom ] mock_config_entry.add_to_hass(hass) diff --git a/tests/components/tessie/snapshots/test_sensor.ambr b/tests/components/tessie/snapshots/test_sensor.ambr index afe229feba0..0a5ff4603aa 100644 --- a/tests/components/tessie/snapshots/test_sensor.ambr +++ b/tests/components/tessie/snapshots/test_sensor.ambr @@ -2120,7 +2120,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0.0', }) # --- # name: test_sensors[sensor.wall_connector_power_2-entry] @@ -2177,7 +2177,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0.0', }) # --- # name: test_sensors[sensor.wall_connector_state-entry] @@ -2249,7 +2249,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'disconnected', }) # --- # name: test_sensors[sensor.wall_connector_state_2-entry] @@ -2321,7 +2321,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'disconnected', }) # --- # name: test_sensors[sensor.wall_connector_vehicle-entry] diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py index ad18aa53071..81fa573852e 100644 --- a/tests/helpers/test_llm.py +++ b/tests/helpers/test_llm.py @@ -408,7 +408,7 @@ async def test_assist_api_prompt( hass.states.async_set( entry1.entity_id, "on", - {"friendly_name": "Kitchen", "temperature": Decimal("0.9")}, + {"friendly_name": "Kitchen", "temperature": Decimal("0.9"), "humidity": 65}, ) hass.states.async_set(entry2.entity_id, "on", {"friendly_name": "Living Room"}) @@ -517,9 +517,7 @@ async def test_assist_api_prompt( entry1.entity_id: { "names": "Kitchen", "state": "on", - "attributes": { - "temperature": "0.9", - }, + "attributes": {"temperature": "0.9", "humidity": "65"}, }, entry2.entity_id: { "areas": "Test Area, Alternative name",
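The smhi/weather.py change earlier in this patch attaches UTC to each forecast valid_time before calling isoformat(), which is why every datetime in the smhi snapshot above gained a +00:00 suffix. A quick standard-library illustration (in Home Assistant, dt_util.UTC is datetime.timezone.utc):

from datetime import datetime, timezone

valid_time = datetime(2023, 8, 8, 0, 0)  # naive value, like forecast.valid_time
print(valid_time.isoformat())                                # 2023-08-08T00:00:00 (old snapshot)
print(valid_time.replace(tzinfo=timezone.utc).isoformat())   # 2023-08-08T00:00:00+00:00 (new snapshot)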