Avoid nesting sessions in recorder purge tests (#122581)

Erik Montnemery authored on 2024-07-25 13:12:10 +02:00, committed by GitHub
parent c12a79ecba
commit 1f2c54f112
GPG Key ID: B5690EEEBB952194
2 changed files with 485 additions and 293 deletions
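
The pattern applied throughout both changed test files: rather than opening a single session_scope() that wraps the setup queries, the purge_old_data() call, and the follow-up assertions (and then re-counting query objects created before the purge), the tests now run the purge with no session of their own open and open a fresh, short-lived session afterwards to re-query and verify the result. A minimal sketch of that shape follows; it is illustrative rather than copied from the diff, the test name and asserted values are made up, and it assumes the usual import locations for the recorder helpers plus the standard hass and recorder_mock pytest fixtures:

    from datetime import timedelta

    from homeassistant.components.recorder import Recorder
    from homeassistant.components.recorder.db_schema import States
    from homeassistant.components.recorder.purge import purge_old_data
    from homeassistant.components.recorder.util import session_scope
    from homeassistant.core import HomeAssistant
    from homeassistant.util import dt as dt_util


    async def test_purge_without_nested_sessions(
        hass: HomeAssistant, recorder_mock: Recorder
    ) -> None:
        """Sketch of the session handling used after this commit (hypothetical test)."""
        # Read the database in a short-lived session, then leave it.
        with session_scope(hass=hass) as session:
            states_before = session.query(States).count()

        purge_before = dt_util.utcnow() - timedelta(days=4)

        # The purge runs while the test holds no open session of its own.
        finished = purge_old_data(recorder_mock, purge_before, repack=False)
        assert finished

        # Verify through a brand-new session instead of reusing query objects
        # that were created before the purge.
        with session_scope(hass=hass) as session:
            assert session.query(States).count() <= states_before

Re-querying inside a new session after the purge is what the repeated "with session_scope(hass=hass) as session:" blocks in the hunks below do; it avoids asserting against a session (and its query objects) opened before the purge ran.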

File 1 of 2

@@ -85,12 +85,12 @@ async def test_purge_big_database(hass: HomeAssistant, recorder_mock: Recorder)
     with (
         patch.object(recorder_mock, "max_bind_vars", 72),
         patch.object(recorder_mock.database_engine, "max_bind_vars", 72),
-        session_scope(hass=hass) as session,
     ):
-        states = session.query(States)
-        state_attributes = session.query(StateAttributes)
-        assert states.count() == 72
-        assert state_attributes.count() == 3
+        with session_scope(hass=hass) as session:
+            states = session.query(States)
+            state_attributes = session.query(StateAttributes)
+            assert states.count() == 72
+            assert state_attributes.count() == 3

         purge_before = dt_util.utcnow() - timedelta(days=4)
@@ -102,8 +102,12 @@ async def test_purge_big_database(hass: HomeAssistant, recorder_mock: Recorder)
             repack=False,
         )
         assert not finished
-        assert states.count() == 24
-        assert state_attributes.count() == 1
+
+        with session_scope(hass=hass) as session:
+            states = session.query(States)
+            state_attributes = session.query(StateAttributes)
+            assert states.count() == 24
+            assert state_attributes.count() == 1


 async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None:
@@ -122,24 +126,30 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) ->
         events = session.query(Events).filter(Events.event_type == "state_changed")
         assert events.count() == 0

-        assert "test.recorder2" in recorder_mock.states_manager._last_committed_id
-        purge_before = dt_util.utcnow() - timedelta(days=4)
-        # run purge_old_data()
-        finished = purge_old_data(
-            recorder_mock,
-            purge_before,
-            states_batch_size=1,
-            events_batch_size=1,
-            repack=False,
-        )
-        assert not finished
+    assert "test.recorder2" in recorder_mock.states_manager._last_committed_id
+
+    purge_before = dt_util.utcnow() - timedelta(days=4)
+
+    # run purge_old_data()
+    finished = purge_old_data(
+        recorder_mock,
+        purge_before,
+        states_batch_size=1,
+        events_batch_size=1,
+        repack=False,
+    )
+    assert not finished
+
+    with session_scope(hass=hass) as session:
+        states = session.query(States)
+        state_attributes = session.query(StateAttributes)
         assert states.count() == 2
         assert state_attributes.count() == 1

     assert "test.recorder2" in recorder_mock.states_manager._last_committed_id

+    with session_scope(hass=hass) as session:
         states_after_purge = list(session.query(States))
         # Since these states are deleted in batches, we can't guarantee the order
         # but we can look them up by state
@@ -150,27 +160,33 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) ->
         assert dontpurgeme_5.old_state_id == dontpurgeme_4.state_id
         assert dontpurgeme_4.old_state_id is None

     finished = purge_old_data(recorder_mock, purge_before, repack=False)
     assert finished

+    with session_scope(hass=hass) as session:
+        states = session.query(States)
+        state_attributes = session.query(StateAttributes)
         assert states.count() == 2
         assert state_attributes.count() == 1

     assert "test.recorder2" in recorder_mock.states_manager._last_committed_id

     # run purge_old_data again
     purge_before = dt_util.utcnow()
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         states_batch_size=1,
         events_batch_size=1,
         repack=False,
     )
     assert not finished

+    with session_scope(hass=hass) as session:
         assert states.count() == 0
         assert state_attributes.count() == 0

     assert "test.recorder2" not in recorder_mock.states_manager._last_committed_id

     # Add some more states
     await _add_test_states(hass)
@@ -290,29 +306,39 @@ async def test_purge_old_events(hass: HomeAssistant, recorder_mock: Recorder) ->
         )
         assert events.count() == 6

     purge_before = dt_util.utcnow() - timedelta(days=4)

     # run purge_old_data()
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         repack=False,
         events_batch_size=1,
         states_batch_size=1,
     )
     assert not finished

+    with session_scope(hass=hass) as session:
+        events = session.query(Events).filter(
+            Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES))
+        )
         all_events = events.all()
         assert events.count() == 2, f"Should have 2 events left: {all_events}"

     # we should only have 2 events left
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         repack=False,
         events_batch_size=1,
         states_batch_size=1,
     )
     assert finished

+    with session_scope(hass=hass) as session:
+        events = session.query(Events).filter(
+            Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES))
+        )
         assert events.count() == 2
@@ -327,26 +353,29 @@ async def test_purge_old_recorder_runs(
         recorder_runs = session.query(RecorderRuns)
         assert recorder_runs.count() == 7

     purge_before = dt_util.utcnow()

     # run purge_old_data()
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         repack=False,
         events_batch_size=1,
         states_batch_size=1,
     )
     assert not finished

     finished = purge_old_data(
         recorder_mock,
         purge_before,
         repack=False,
         events_batch_size=1,
         states_batch_size=1,
     )
     assert finished

+    with session_scope(hass=hass) as session:
+        recorder_runs = session.query(RecorderRuns)
         assert recorder_runs.count() == 1
@@ -361,14 +390,17 @@ async def test_purge_old_statistics_runs(
         statistics_runs = session.query(StatisticsRuns)
         assert statistics_runs.count() == 7

     purge_before = dt_util.utcnow()

     # run purge_old_data()
     finished = purge_old_data(recorder_mock, purge_before, repack=False)
     assert not finished

     finished = purge_old_data(recorder_mock, purge_before, repack=False)
     assert finished

+    with session_scope(hass=hass) as session:
+        statistics_runs = session.query(StatisticsRuns)
         assert statistics_runs.count() == 1
@@ -1655,39 +1687,54 @@ async def test_purge_many_old_events(
         )
         assert events.count() == old_events_count * 6

     purge_before = dt_util.utcnow() - timedelta(days=4)

     # run purge_old_data()
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         repack=False,
         states_batch_size=3,
         events_batch_size=3,
     )
     assert not finished

+    with session_scope(hass=hass) as session:
+        events = session.query(Events).filter(
+            Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES))
+        )
         assert events.count() == old_events_count * 3

     # we should only have 2 groups of events left
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         repack=False,
         states_batch_size=3,
         events_batch_size=3,
     )
     assert finished

+    with session_scope(hass=hass) as session:
+        events = session.query(Events).filter(
+            Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES))
+        )
         assert events.count() == old_events_count * 2

     # we should now purge everything
     finished = purge_old_data(
         recorder_mock,
         dt_util.utcnow(),
         repack=False,
         states_batch_size=20,
         events_batch_size=20,
     )
     assert finished

+    with session_scope(hass=hass) as session:
+        events = session.query(Events).filter(
+            Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES))
+        )
         assert events.count() == 0
@@ -1762,37 +1809,61 @@ async def test_purge_old_events_purges_the_event_type_ids(
         assert events.count() == 30
         assert event_types.count() == 4

     # run purge_old_data()
     finished = purge_old_data(
         recorder_mock,
         far_past,
         repack=False,
     )
     assert finished

+    with session_scope(hass=hass) as session:
+        events = session.query(Events).where(
+            Events.event_type_id.in_(test_event_type_ids)
+        )
+        event_types = session.query(EventTypes).where(
+            EventTypes.event_type_id.in_(test_event_type_ids)
+        )
         assert events.count() == 30
         # We should remove the unused event type
         assert event_types.count() == 3

     assert "EVENT_TEST_UNUSED" not in recorder_mock.event_type_manager._id_map

     # we should only have 10 events left since
     # only one event type was recorded now
     finished = purge_old_data(
         recorder_mock,
         utcnow,
         repack=False,
     )
     assert finished

+    with session_scope(hass=hass) as session:
+        events = session.query(Events).where(
+            Events.event_type_id.in_(test_event_type_ids)
+        )
+        event_types = session.query(EventTypes).where(
+            EventTypes.event_type_id.in_(test_event_type_ids)
+        )
         assert events.count() == 10
         assert event_types.count() == 1

     # Purge everything
     finished = purge_old_data(
         recorder_mock,
         utcnow + timedelta(seconds=1),
         repack=False,
     )
     assert finished

+    with session_scope(hass=hass) as session:
+        events = session.query(Events).where(
+            Events.event_type_id.in_(test_event_type_ids)
+        )
+        event_types = session.query(EventTypes).where(
+            EventTypes.event_type_id.in_(test_event_type_ids)
+        )
         assert events.count() == 0
         assert event_types.count() == 0
@@ -1864,37 +1935,55 @@ async def test_purge_old_states_purges_the_state_metadata_ids(
         assert states.count() == 30
         assert states_meta.count() == 4

     # run purge_old_data()
     finished = purge_old_data(
         recorder_mock,
         far_past,
         repack=False,
     )
     assert finished

+    with session_scope(hass=hass) as session:
+        states = session.query(States).where(States.metadata_id.in_(test_metadata_ids))
+        states_meta = session.query(StatesMeta).where(
+            StatesMeta.metadata_id.in_(test_metadata_ids)
+        )
         assert states.count() == 30
         # We should remove the unused entity_id
         assert states_meta.count() == 3

     assert "sensor.unused" not in recorder_mock.event_type_manager._id_map

     # we should only have 10 states left since
     # only one event type was recorded now
     finished = purge_old_data(
         recorder_mock,
         utcnow,
         repack=False,
     )
     assert finished

+    with session_scope(hass=hass) as session:
+        states = session.query(States).where(States.metadata_id.in_(test_metadata_ids))
+        states_meta = session.query(StatesMeta).where(
+            StatesMeta.metadata_id.in_(test_metadata_ids)
+        )
         assert states.count() == 10
         assert states_meta.count() == 1

     # Purge everything
     finished = purge_old_data(
         recorder_mock,
         utcnow + timedelta(seconds=1),
         repack=False,
     )
     assert finished

+    with session_scope(hass=hass) as session:
+        states = session.query(States).where(States.metadata_id.in_(test_metadata_ids))
+        states_meta = session.query(StatesMeta).where(
+            StatesMeta.metadata_id.in_(test_metadata_ids)
+        )
         assert states.count() == 0
         assert states_meta.count() == 0

File 2 of 2

@@ -96,17 +96,21 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) ->
         assert events.count() == 0

     assert "test.recorder2" in recorder_mock.states_manager._last_committed_id

     purge_before = dt_util.utcnow() - timedelta(days=4)

     # run purge_old_data()
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         states_batch_size=1,
         events_batch_size=1,
         repack=False,
     )
     assert not finished

+    with session_scope(hass=hass) as session:
+        states = session.query(States)
+        state_attributes = session.query(StateAttributes)
         assert states.count() == 2
         assert state_attributes.count() == 1
@@ -122,23 +126,31 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) ->
         assert dontpurgeme_5.old_state_id == dontpurgeme_4.state_id
         assert dontpurgeme_4.old_state_id is None

     finished = purge_old_data(recorder_mock, purge_before, repack=False)
     assert finished

+    with session_scope(hass=hass) as session:
+        states = session.query(States)
+        state_attributes = session.query(StateAttributes)
         assert states.count() == 2
         assert state_attributes.count() == 1

     assert "test.recorder2" in recorder_mock.states_manager._last_committed_id

     # run purge_old_data again
     purge_before = dt_util.utcnow()
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         states_batch_size=1,
         events_batch_size=1,
         repack=False,
     )
     assert not finished

+    with session_scope(hass=hass) as session:
+        states = session.query(States)
+        state_attributes = session.query(StateAttributes)
         assert states.count() == 0
         assert state_attributes.count() == 0
@@ -270,26 +282,32 @@ async def test_purge_old_events(hass: HomeAssistant, recorder_mock: Recorder) ->
     purge_before = dt_util.utcnow() - timedelta(days=4)

     # run purge_old_data()
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         repack=False,
         events_batch_size=1,
         states_batch_size=1,
     )
     assert not finished

+    with session_scope(hass=hass) as session:
+        events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%"))
         assert events.count() == 2

     # we should only have 2 events left
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         repack=False,
         events_batch_size=1,
         states_batch_size=1,
     )
     assert finished

+    with session_scope(hass=hass) as session:
+        events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%"))
         assert events.count() == 2
@@ -306,26 +324,29 @@ async def test_purge_old_recorder_runs(
         recorder_runs = session.query(RecorderRuns)
         assert recorder_runs.count() == 7

     purge_before = dt_util.utcnow()

     # run purge_old_data()
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         repack=False,
         events_batch_size=1,
         states_batch_size=1,
     )
     assert not finished

     finished = purge_old_data(
         recorder_mock,
         purge_before,
         repack=False,
         events_batch_size=1,
         states_batch_size=1,
     )
     assert finished

+    with session_scope(hass=hass) as session:
+        recorder_runs = session.query(RecorderRuns)
         assert recorder_runs.count() == 1
@@ -342,14 +363,17 @@ async def test_purge_old_statistics_runs(
         statistics_runs = session.query(StatisticsRuns)
         assert statistics_runs.count() == 7

     purge_before = dt_util.utcnow()

     # run purge_old_data()
     finished = purge_old_data(recorder_mock, purge_before, repack=False)
     assert not finished

     finished = purge_old_data(recorder_mock, purge_before, repack=False)
     assert finished

+    with session_scope(hass=hass) as session:
+        statistics_runs = session.query(StatisticsRuns)
         assert statistics_runs.count() == 1
@@ -945,39 +969,48 @@ async def test_purge_many_old_events(
         events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%"))
         assert events.count() == old_events_count * 6

     purge_before = dt_util.utcnow() - timedelta(days=4)

     # run purge_old_data()
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         repack=False,
         states_batch_size=3,
         events_batch_size=3,
     )
     assert not finished

+    with session_scope(hass=hass) as session:
+        events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%"))
         assert events.count() == old_events_count * 3

     # we should only have 2 groups of events left
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         repack=False,
         states_batch_size=3,
         events_batch_size=3,
     )
     assert finished

+    with session_scope(hass=hass) as session:
+        events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%"))
         assert events.count() == old_events_count * 2

     # we should now purge everything
     finished = purge_old_data(
         recorder_mock,
         dt_util.utcnow(),
         repack=False,
         states_batch_size=20,
         events_batch_size=20,
     )
     assert finished

+    with session_scope(hass=hass) as session:
+        events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%"))
         assert events.count() == 0
@@ -1038,39 +1071,65 @@ async def test_purge_can_mix_legacy_and_new_format(
         assert states_with_event_id.count() == 50
         assert states_without_event_id.count() == 51

     purge_before = dt_util.utcnow() - timedelta(days=4)
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         repack=False,
     )
     assert not finished

+    with session_scope(hass=hass) as session:
+        states_with_event_id = session.query(States).filter(
+            States.event_id.is_not(None)
+        )
+        states_without_event_id = session.query(States).filter(
+            States.event_id.is_(None)
+        )
         assert states_with_event_id.count() == 0
         assert states_without_event_id.count() == 51

-        # At this point all the legacy states are gone
-        # and we switch methods
-        purge_before = dt_util.utcnow() - timedelta(days=4)
-        finished = purge_old_data(
-            recorder_mock,
-            purge_before,
-            repack=False,
-            events_batch_size=1,
-            states_batch_size=1,
-        )
-        # Since we only allow one iteration, we won't
-        # check if we are finished this loop similar
-        # to the legacy method
-        assert not finished
+    # At this point all the legacy states are gone
+    # and we switch methods
+    purge_before = dt_util.utcnow() - timedelta(days=4)
+    finished = purge_old_data(
+        recorder_mock,
+        purge_before,
+        repack=False,
+        events_batch_size=1,
+        states_batch_size=1,
+    )
+    # Since we only allow one iteration, we won't
+    # check if we are finished this loop similar
+    # to the legacy method
+    assert not finished
+
+    with session_scope(hass=hass) as session:
+        states_with_event_id = session.query(States).filter(
+            States.event_id.is_not(None)
+        )
+        states_without_event_id = session.query(States).filter(
+            States.event_id.is_(None)
+        )
         assert states_with_event_id.count() == 0
         assert states_without_event_id.count() == 1

-        finished = purge_old_data(
-            recorder_mock,
-            purge_before,
-            repack=False,
-            events_batch_size=100,
-            states_batch_size=100,
-        )
-        assert finished
+    finished = purge_old_data(
+        recorder_mock,
+        purge_before,
+        repack=False,
+        events_batch_size=100,
+        states_batch_size=100,
+    )
+    assert finished
+
+    with session_scope(hass=hass) as session:
+        states_with_event_id = session.query(States).filter(
+            States.event_id.is_not(None)
+        )
+        states_without_event_id = session.query(States).filter(
+            States.event_id.is_(None)
+        )
         assert states_with_event_id.count() == 0
         assert states_without_event_id.count() == 1

     _add_state_without_event_linkage(
@@ -1078,12 +1137,21 @@ async def test_purge_can_mix_legacy_and_new_format(
     )
     assert states_with_event_id.count() == 0
     assert states_without_event_id.count() == 2

-        finished = purge_old_data(
-            recorder_mock,
-            purge_before,
-            repack=False,
-        )
-        assert finished
+    finished = purge_old_data(
+        recorder_mock,
+        purge_before,
+        repack=False,
+    )
+    assert finished
+
+    with session_scope(hass=hass) as session:
+        states_with_event_id = session.query(States).filter(
+            States.event_id.is_not(None)
+        )
+        states_without_event_id = session.query(States).filter(
+            States.event_id.is_(None)
+        )
         # The broken state without a timestamp
         # does not prevent future purges. Its ignored.
         assert states_with_event_id.count() == 0
@@ -1185,39 +1253,65 @@ async def test_purge_can_mix_legacy_and_new_format_with_detached_state(
         assert states_with_event_id.count() == 52
         assert states_without_event_id.count() == 51

     purge_before = dt_util.utcnow() - timedelta(days=4)
     finished = purge_old_data(
         recorder_mock,
         purge_before,
         repack=False,
     )
     assert not finished

+    with session_scope(hass=hass) as session:
+        states_with_event_id = session.query(States).filter(
+            States.event_id.is_not(None)
+        )
+        states_without_event_id = session.query(States).filter(
+            States.event_id.is_(None)
+        )
         assert states_with_event_id.count() == 0
         assert states_without_event_id.count() == 51

-        # At this point all the legacy states are gone
-        # and we switch methods
-        purge_before = dt_util.utcnow() - timedelta(days=4)
-        finished = purge_old_data(
-            recorder_mock,
-            purge_before,
-            repack=False,
-            events_batch_size=1,
-            states_batch_size=1,
-        )
-        # Since we only allow one iteration, we won't
-        # check if we are finished this loop similar
-        # to the legacy method
-        assert not finished
+    # At this point all the legacy states are gone
+    # and we switch methods
+    purge_before = dt_util.utcnow() - timedelta(days=4)
+    finished = purge_old_data(
+        recorder_mock,
+        purge_before,
+        repack=False,
+        events_batch_size=1,
+        states_batch_size=1,
+    )
+    # Since we only allow one iteration, we won't
+    # check if we are finished this loop similar
+    # to the legacy method
+    assert not finished
+
+    with session_scope(hass=hass) as session:
+        states_with_event_id = session.query(States).filter(
+            States.event_id.is_not(None)
+        )
+        states_without_event_id = session.query(States).filter(
+            States.event_id.is_(None)
+        )
         assert states_with_event_id.count() == 0
         assert states_without_event_id.count() == 1

-        finished = purge_old_data(
-            recorder_mock,
-            purge_before,
-            repack=False,
-            events_batch_size=100,
-            states_batch_size=100,
-        )
-        assert finished
+    finished = purge_old_data(
+        recorder_mock,
+        purge_before,
+        repack=False,
+        events_batch_size=100,
+        states_batch_size=100,
+    )
+    assert finished
+
+    with session_scope(hass=hass) as session:
+        states_with_event_id = session.query(States).filter(
+            States.event_id.is_not(None)
+        )
+        states_without_event_id = session.query(States).filter(
+            States.event_id.is_(None)
+        )
         assert states_with_event_id.count() == 0
         assert states_without_event_id.count() == 1

     _add_state_without_event_linkage(
@@ -1225,12 +1319,21 @@ async def test_purge_can_mix_legacy_and_new_format_with_detached_state(
     )
     assert states_with_event_id.count() == 0
    assert states_without_event_id.count() == 2

-        finished = purge_old_data(
-            recorder_mock,
-            purge_before,
-            repack=False,
-        )
-        assert finished
+    finished = purge_old_data(
+        recorder_mock,
+        purge_before,
+        repack=False,
+    )
+    assert finished
+
+    with session_scope(hass=hass) as session:
+        states_with_event_id = session.query(States).filter(
+            States.event_id.is_not(None)
+        )
+        states_without_event_id = session.query(States).filter(
+            States.event_id.is_(None)
+        )
         # The broken state without a timestamp
         # does not prevent future purges. Its ignored.
         assert states_with_event_id.count() == 0