diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py
index 4777eeb500e..40c0453ea0b 100644
--- a/homeassistant/components/recorder/db_schema.py
+++ b/homeassistant/components/recorder/db_schema.py
@@ -104,10 +104,10 @@ class FAST_PYSQLITE_DATETIME(sqlite.DATETIME):  # type: ignore[misc]
         return lambda value: None if value is None else ciso8601.parse_datetime(value)
 
 
-JSON_VARIENT_CAST = Text().with_variant(
+JSON_VARIANT_CAST = Text().with_variant(
     postgresql.JSON(none_as_null=True), "postgresql"
 )
-JSONB_VARIENT_CAST = Text().with_variant(
+JSONB_VARIANT_CAST = Text().with_variant(
     postgresql.JSONB(none_as_null=True), "postgresql"
 )
 DATETIME_TYPE = (
@@ -590,17 +590,17 @@ class StatisticsRuns(Base):  # type: ignore[misc,valid-type]
         )
 
 
 EVENT_DATA_JSON = type_coerce(
-    EventData.shared_data.cast(JSONB_VARIENT_CAST), JSONLiteral(none_as_null=True)
+    EventData.shared_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True)
 )
 OLD_FORMAT_EVENT_DATA_JSON = type_coerce(
-    Events.event_data.cast(JSONB_VARIENT_CAST), JSONLiteral(none_as_null=True)
+    Events.event_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True)
 )
 SHARED_ATTRS_JSON = type_coerce(
-    StateAttributes.shared_attrs.cast(JSON_VARIENT_CAST), JSON(none_as_null=True)
+    StateAttributes.shared_attrs.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
 )
 OLD_FORMAT_ATTRS_JSON = type_coerce(
-    States.attributes.cast(JSON_VARIENT_CAST), JSON(none_as_null=True)
+    States.attributes.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
 )
 
 ENTITY_ID_IN_EVENT: Column = EVENT_DATA_JSON["entity_id"]
diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py
index 6e4a67c9da5..ab9b93de5e5 100644
--- a/homeassistant/components/recorder/migration.py
+++ b/homeassistant/components/recorder/migration.py
@@ -636,7 +636,7 @@ def _apply_update(  # noqa: C901
             fake_start_time += timedelta(minutes=5)
 
         # When querying the database, be careful to only explicitly query for columns
-        # which were present in schema version 21. If querying the table, SQLAlchemy
+        # which were present in schema version 22. If querying the table, SQLAlchemy
         # will refer to future columns.
         with session_scope(session=session_maker()) as session:
             for sum_statistic in session.query(StatisticsMeta.id).filter_by(
diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py
index 0a9e29747a9..a1ab58ee011 100644
--- a/homeassistant/components/recorder/statistics.py
+++ b/homeassistant/components/recorder/statistics.py
@@ -420,6 +420,9 @@ def delete_statistics_duplicates(hass: HomeAssistant, session: Session) -> None:
 
 def _find_statistics_meta_duplicates(session: Session) -> list[int]:
     """Find duplicated statistics_meta."""
+    # When querying the database, be careful to only explicitly query for columns
+    # which were present in schema version 29. If querying the table, SQLAlchemy
+    # will refer to future columns.
     subquery = (
         session.query(
             StatisticsMeta.statistic_id,
@@ -430,7 +433,7 @@ def _find_statistics_meta_duplicates(session: Session) -> list[int]:
         .subquery()
     )
     query = (
-        session.query(StatisticsMeta)
+        session.query(StatisticsMeta.statistic_id, StatisticsMeta.id)
         .outerjoin(
             subquery,
             (subquery.c.statistic_id == StatisticsMeta.statistic_id),
@@ -473,7 +476,10 @@ def _delete_statistics_meta_duplicates(session: Session) -> int:
 
 
 def delete_statistics_meta_duplicates(session: Session) -> None:
-    """Identify and delete duplicated statistics_meta."""
+    """Identify and delete duplicated statistics_meta.
+
+    This is used when migrating from schema version 28 to schema version 29.
+    """
     deleted_statistics_rows = _delete_statistics_meta_duplicates(session)
     if deleted_statistics_rows:
         _LOGGER.info(