Fix DB migration to schema version 29 (#78037)

* Fix DB migration to schema version 29

* Fix misspelled constants
Erik Montnemery, 2022-09-09 08:06:14 +02:00 (committed by GitHub)
parent 0e734e629c
commit eb28d7188b
3 changed files with 15 additions and 9 deletions


@@ -104,10 +104,10 @@ class FAST_PYSQLITE_DATETIME(sqlite.DATETIME):  # type: ignore[misc]
         return lambda value: None if value is None else ciso8601.parse_datetime(value)
 
 
-JSON_VARIENT_CAST = Text().with_variant(
+JSON_VARIANT_CAST = Text().with_variant(
     postgresql.JSON(none_as_null=True), "postgresql"
 )
-JSONB_VARIENT_CAST = Text().with_variant(
+JSONB_VARIANT_CAST = Text().with_variant(
     postgresql.JSONB(none_as_null=True), "postgresql"
 )
 DATETIME_TYPE = (
@@ -590,17 +590,17 @@ class StatisticsRuns(Base):  # type: ignore[misc,valid-type]
 EVENT_DATA_JSON = type_coerce(
-    EventData.shared_data.cast(JSONB_VARIENT_CAST), JSONLiteral(none_as_null=True)
+    EventData.shared_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True)
 )
 OLD_FORMAT_EVENT_DATA_JSON = type_coerce(
-    Events.event_data.cast(JSONB_VARIENT_CAST), JSONLiteral(none_as_null=True)
+    Events.event_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True)
 )
 SHARED_ATTRS_JSON = type_coerce(
-    StateAttributes.shared_attrs.cast(JSON_VARIENT_CAST), JSON(none_as_null=True)
+    StateAttributes.shared_attrs.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
 )
 OLD_FORMAT_ATTRS_JSON = type_coerce(
-    States.attributes.cast(JSON_VARIENT_CAST), JSON(none_as_null=True)
+    States.attributes.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
 )
 ENTITY_ID_IN_EVENT: Column = EVENT_DATA_JSON["entity_id"]
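
For context, a minimal standalone sketch of what the renamed constants do (a lightweight hypothetical event_data column stands in for the real models, and the plain SQLAlchemy JSON type stands in for the recorder's JSONLiteral): with_variant makes the cast target compile as TEXT everywhere except PostgreSQL, where it compiles as JSON, and type_coerce marks the cast result as JSON on the Python side so it can be indexed the way ENTITY_ID_IN_EVENT is above.

# Sketch only: a bare column() construct, not the recorder's Events table.
from sqlalchemy import JSON, Text, column, select, type_coerce
from sqlalchemy.dialects import postgresql, sqlite

JSON_VARIANT_CAST = Text().with_variant(
    postgresql.JSON(none_as_null=True), "postgresql"
)

event_data = column("event_data", Text)  # hypothetical column

# Cast the stored text to JSON on PostgreSQL (TEXT elsewhere), then coerce the
# Python-side type to JSON so ["entity_id"] indexing is available.
EVENT_DATA_JSON = type_coerce(
    event_data.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
)
ENTITY_ID_IN_EVENT = EVENT_DATA_JSON["entity_id"]

stmt = select(ENTITY_ID_IN_EVENT)
# Renders the dialect's JSON extraction, e.g. JSON_EXTRACT(...) on SQLite and
# the -> operator on PostgreSQL.
print(stmt.compile(dialect=sqlite.dialect()))
print(stmt.compile(dialect=postgresql.dialect()))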


@@ -636,7 +636,7 @@ def _apply_update(  # noqa: C901
             fake_start_time += timedelta(minutes=5)
 
         # When querying the database, be careful to only explicitly query for columns
-        # which were present in schema version 21. If querying the table, SQLAlchemy
+        # which were present in schema version 22. If querying the table, SQLAlchemy
         # will refer to future columns.
         with session_scope(session=session_maker()) as session:
             for sum_statistic in session.query(StatisticsMeta.id).filter_by(
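
The corrected comment states the general rule these migrations follow: the ORM models always describe the newest schema, so while upgrading, the on-disk table may not yet contain every mapped column, and only columns known to exist may be selected explicitly. A minimal sketch of the failure mode, using a hypothetical cut-down model and an in-memory SQLite database rather than the recorder's real schema:

from sqlalchemy import Column, Integer, String, create_engine, text
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class StatisticsMeta(Base):
    """Model as of the newest schema; an old database may lack "name"."""

    __tablename__ = "statistics_meta"
    id = Column(Integer, primary_key=True)
    statistic_id = Column(String(255))
    name = Column(String(255))  # pretend this was added in a later schema version


engine = create_engine("sqlite://")
with engine.begin() as conn:
    # Simulate an old database created before the "name" column existed.
    conn.execute(
        text(
            "CREATE TABLE statistics_meta "
            "(id INTEGER PRIMARY KEY, statistic_id VARCHAR(255))"
        )
    )

with Session(engine) as session:
    session.query(StatisticsMeta.id).all()  # OK: selects only the "id" column
    # session.query(StatisticsMeta).all() would SELECT every mapped column,
    # including "name", and fail with "no such column" against the old table.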


@@ -420,6 +420,9 @@ def delete_statistics_duplicates(hass: HomeAssistant, session: Session) -> None:
 
 def _find_statistics_meta_duplicates(session: Session) -> list[int]:
     """Find duplicated statistics_meta."""
+    # When querying the database, be careful to only explicitly query for columns
+    # which were present in schema version 29. If querying the table, SQLAlchemy
+    # will refer to future columns.
     subquery = (
         session.query(
             StatisticsMeta.statistic_id,
@@ -430,7 +433,7 @@ def _find_statistics_meta_duplicates(session: Session) -> list[int]:
         .subquery()
     )
     query = (
-        session.query(StatisticsMeta)
+        session.query(StatisticsMeta.statistic_id, StatisticsMeta.id)
         .outerjoin(
             subquery,
             (subquery.c.statistic_id == StatisticsMeta.statistic_id),
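
Narrowing session.query(StatisticsMeta) to the two columns that existed before version 29 applies the same rule as the comment added above. For reference, a hedged sketch of the overall duplicate-detection pattern these fragments belong to, not the recorder's exact implementation; it assumes a StatisticsMeta model with id and statistic_id columns is importable:

# Sketch only: find statistic_ids that occur more than once, keep the newest
# row per statistic_id, and report the older rows' ids for deletion.
from sqlalchemy import func, literal_column
from sqlalchemy.orm import Session


def find_statistics_meta_duplicates(session: Session) -> list[int]:
    """Return ids of statistics_meta rows whose statistic_id is duplicated."""
    duplicated = (
        session.query(
            StatisticsMeta.statistic_id,
            literal_column("1").label("is_duplicate"),
        )
        .group_by(StatisticsMeta.statistic_id)
        .having(func.count() > 1)
        .subquery()
    )
    rows = (
        # Select only columns present in schema version 29.
        session.query(StatisticsMeta.statistic_id, StatisticsMeta.id)
        .outerjoin(
            duplicated,
            duplicated.c.statistic_id == StatisticsMeta.statistic_id,
        )
        .filter(duplicated.c.is_duplicate.isnot(None))
        .order_by(StatisticsMeta.statistic_id, StatisticsMeta.id.desc())
        .all()
    )
    # The first row seen per statistic_id has the highest id; keep it and
    # flag the rest as duplicates.
    seen: set[str] = set()
    duplicate_ids: list[int] = []
    for statistic_id, meta_id in rows:
        if statistic_id in seen:
            duplicate_ids.append(meta_id)
        else:
            seen.add(statistic_id)
    return duplicate_ids
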
@@ -473,7 +476,10 @@ def _delete_statistics_meta_duplicates(session: Session) -> int:
 
 
 def delete_statistics_meta_duplicates(session: Session) -> None:
-    """Identify and delete duplicated statistics_meta."""
+    """Identify and delete duplicated statistics_meta.
+
+    This is used when migrating from schema version 28 to schema version 29.
+    """
     deleted_statistics_rows = _delete_statistics_meta_duplicates(session)
     if deleted_statistics_rows:
         _LOGGER.info(