Mirror of https://github.com/home-assistant/core.git
Fix migration failing when existing data has duplicates (#88712)
commit 0c6a469218 (parent e69271cb46)
@@ -13,6 +13,7 @@ from sqlalchemy import ForeignKeyConstraint, MetaData, Table, func, text
 from sqlalchemy.engine import CursorResult, Engine
 from sqlalchemy.exc import (
     DatabaseError,
+    IntegrityError,
     InternalError,
     OperationalError,
     ProgrammingError,
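
Context for the new import, with a minimal standalone sketch that is not part of the commit (the demo table and column names are invented for illustration): in SQLAlchemy, committing a row that violates a unique index raises sqlalchemy.exc.IntegrityError, which is the exception the migration code below now catches.

# Standalone demo, assuming SQLAlchemy 1.4+ and an in-memory SQLite database.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class MetaDemo(Base):
    """Hypothetical table with a unique column, standing in for statistics metadata."""

    __tablename__ = "meta_demo"
    id = Column(Integer, primary_key=True)
    statistic_id = Column(String(255), unique=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(MetaDemo(statistic_id="sensor.energy"))
    session.commit()
    session.add(MetaDemo(statistic_id="sensor.energy"))  # duplicate value
    try:
        session.commit()
    except IntegrityError:
        session.rollback()
        print("duplicate rejected by the unique index")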
@@ -778,9 +779,10 @@ def _apply_update(  # noqa: C901
         # Add name column to StatisticsMeta
         _add_columns(session_maker, "statistics_meta", ["name VARCHAR(255)"])
     elif new_version == 24:
-        _LOGGER.debug("Deleting duplicated statistics entries")
-        with session_scope(session=session_maker()) as session:
-            delete_statistics_duplicates(hass, session)
+        # This used to create the unique indices for start and statistic_id
+        # but we changed the format in schema 34 which will now take care
+        # of removing any duplicate if they still exist.
+        pass
     elif new_version == 25:
         _add_columns(session_maker, "states", [f"attributes_id {big_int}"])
         _create_index(session_maker, "states", "ix_states_attributes_id")
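
The version 24 branch is kept as an explicit no-op rather than deleted because the updater dispatches on every schema version in sequence. A rough sketch of that dispatch shape, assuming nothing about the real _apply_update beyond what the hunk shows (the helper bodies are placeholders):

def apply_schema_step(new_version: int) -> None:
    """Illustrative version dispatch; each historical version keeps its branch."""
    if new_version == 23:
        ...  # add the name column to statistics_meta
    elif new_version == 24:
        # Former duplicate cleanup; the schema 34 step now removes duplicates itself.
        pass
    elif new_version == 25:
        ...  # add states.attributes_id and its index
    else:
        raise ValueError(f"No migration defined for schema version {new_version}")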
@@ -907,7 +909,26 @@ def _apply_update(  # noqa: C901
             "statistics_short_term",
             "ix_statistics_short_term_statistic_id_start_ts",
         )
-        _migrate_statistics_columns_to_timestamp(session_maker, engine)
+        try:
+            _migrate_statistics_columns_to_timestamp(session_maker, engine)
+        except IntegrityError as ex:
+            _LOGGER.error(
+                "Statistics table contains duplicate entries: %s; "
+                "Cleaning up duplicates and trying again; "
+                "This will take a while; "
+                "Please be patient!",
+                ex,
+            )
+            # There may be duplicated statistics entries, delete duplicates
+            # and try again
+            with session_scope(session=session_maker()) as session:
+                delete_statistics_duplicates(hass, session)
+            _migrate_statistics_columns_to_timestamp(session_maker, engine)
+            # Log at error level to ensure the user sees this message in the log
+            # since we logged the error above.
+            _LOGGER.error(
+                "Statistics migration successfully recovered after statistics table duplicate cleanup"
+            )
     elif new_version == 35:
         # Migration is done in two steps to ensure we can start using
         # the new columns before we wipe the old ones.
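
The pattern above, attempt the column migration and, if duplicate rows abort it with IntegrityError, delete the duplicates and retry exactly once, can be isolated into a small sketch. The callables migrate_columns and dedupe_rows are hypothetical stand-ins, not Home Assistant APIs; a second failure is deliberately left to propagate so the caller still sees the real error.

import logging

from sqlalchemy.exc import IntegrityError

_LOGGER = logging.getLogger(__name__)


def migrate_with_duplicate_recovery(migrate_columns, dedupe_rows) -> None:
    """Run migrate_columns(); on IntegrityError, clean duplicates and retry once."""
    try:
        migrate_columns()
    except IntegrityError as ex:
        _LOGGER.error("Table contains duplicate entries: %s; cleaning up and retrying", ex)
        dedupe_rows()
        migrate_columns()  # a second IntegrityError propagates to the caller
        _LOGGER.error("Migration recovered after duplicate cleanup")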
@@ -125,7 +125,7 @@ def session_scope(
             need_rollback = True
         session.commit()
     except Exception as err:  # pylint: disable=broad-except
-        _LOGGER.error("Error executing query: %s", err)
+        _LOGGER.error("Error executing query: %s", err, exc_info=True)
         if need_rollback:
             session.rollback()
         if not exception_filter or not exception_filter(err):