Drop use of async_setup_recorder_instance fixture in recorder migration tests (#121196)

Erik Montnemery 2024-07-04 17:39:13 +02:00 committed by GitHub
parent 092e362f01
commit a1e6f8c2ec
2 changed files with 91 additions and 87 deletions
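
The diff below replaces per-test calls to the async_setup_recorder_instance fixture with the recorder_mock fixture, plus a module-level mock_recorder_before_hass fixture that requests async_test_recorder so the recorder is prepared before hass is created. A minimal sketch of the resulting test shape, assuming the usual Home Assistant test-suite fixtures; the test name, body, and assertion are illustrative and not part of this commit:

import pytest

from homeassistant.components.recorder import Recorder
from homeassistant.core import HomeAssistant

from tests.typing import RecorderInstanceGenerator


@pytest.fixture
async def mock_recorder_before_hass(
    async_test_recorder: RecorderInstanceGenerator,
) -> None:
    """Set up recorder."""
    # The body is intentionally empty: requesting async_test_recorder here is
    # what makes the recorder patching happen before the hass fixture starts.


async def test_example_migration(hass: HomeAssistant, recorder_mock: Recorder) -> None:
    """Illustrative only: recorder_mock replaces the old manual setup call."""
    # Wherever the old tests did `instance = await async_setup_recorder_instance(hass)`
    # and then used `instance`, the new tests use `recorder_mock` directly.
    await recorder_mock.async_add_executor_job(lambda: None)
    assert recorder_mock.engine is not None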

View File

@@ -1,6 +1,5 @@
 """The tests for the recorder filter matching the EntityFilter component."""
-from collections.abc import AsyncGenerator
 import datetime
 import importlib
 import sys
@@ -60,6 +59,13 @@ CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine"
 SCHEMA_MODULE = "tests.components.recorder.db_schema_32"
 
 
+@pytest.fixture
+async def mock_recorder_before_hass(
+    async_test_recorder: RecorderInstanceGenerator,
+) -> None:
+    """Set up recorder."""
+
+
 async def _async_wait_migration_done(hass: HomeAssistant) -> None:
     """Wait for the migration to be done."""
     await recorder.get_instance(hass).async_block_till_done()
@@ -116,21 +122,11 @@ def db_schema_32():
         yield
 
 
-@pytest.fixture(name="legacy_recorder_mock")
-async def legacy_recorder_mock_fixture(
-    recorder_mock: Recorder,
-) -> AsyncGenerator[Recorder]:
-    """Fixture for legacy recorder mock."""
-    with patch.object(recorder_mock.states_meta_manager, "active", False):
-        yield recorder_mock
-
-
 @pytest.mark.parametrize("enable_migrate_context_ids", [True])
 async def test_migrate_events_context_ids(
-    async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
+    hass: HomeAssistant, recorder_mock: Recorder
 ) -> None:
     """Test we can migrate old uuid context ids and ulid context ids to binary format."""
-    instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
     importlib.import_module(SCHEMA_MODULE)
     old_db_schema = sys.modules[SCHEMA_MODULE]
@@ -224,7 +220,7 @@ async def test_migrate_events_context_ids(
                 )
             )
 
-    await instance.async_add_executor_job(_insert_events)
+    await recorder_mock.async_add_executor_job(_insert_events)
 
     await async_wait_recording_done(hass)
     now = dt_util.utcnow()
@@ -233,7 +229,7 @@ async def test_migrate_events_context_ids(
 
     with freeze_time(now):
         # This is a threadsafe way to add a task to the recorder
-        instance.queue_task(EventsContextIDMigrationTask())
+        recorder_mock.queue_task(EventsContextIDMigrationTask())
         await _async_wait_migration_done(hass)
 
     def _object_as_dict(obj):
@@ -260,7 +256,7 @@ async def test_migrate_events_context_ids(
         assert len(events) == 6
         return {event.event_type: _object_as_dict(event) for event in events}
 
-    events_by_type = await instance.async_add_executor_job(_fetch_migrated_events)
+    events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events)
 
     old_uuid_context_id_event = events_by_type["old_uuid_context_id_event"]
     assert old_uuid_context_id_event["context_id"] is None
@@ -331,7 +327,9 @@ async def test_migrate_events_context_ids(
         event_with_garbage_context_id_no_time_fired_ts["context_parent_id_bin"] is None
     )
 
-    migration_changes = await instance.async_add_executor_job(_get_migration_id, hass)
+    migration_changes = await recorder_mock.async_add_executor_job(
+        _get_migration_id, hass
+    )
     assert (
         migration_changes[migration.EventsContextIDMigration.migration_id]
         == migration.EventsContextIDMigration.migration_version
@@ -340,10 +338,9 @@ async def test_migrate_events_context_ids(
 
 @pytest.mark.parametrize("enable_migrate_context_ids", [True])
 async def test_migrate_states_context_ids(
-    async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
+    hass: HomeAssistant, recorder_mock: Recorder
 ) -> None:
     """Test we can migrate old uuid context ids and ulid context ids to binary format."""
-    instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
     importlib.import_module(SCHEMA_MODULE)
     old_db_schema = sys.modules[SCHEMA_MODULE]
@@ -419,10 +416,10 @@ async def test_migrate_states_context_ids(
                 )
             )
 
-    await instance.async_add_executor_job(_insert_states)
+    await recorder_mock.async_add_executor_job(_insert_states)
 
     await async_wait_recording_done(hass)
-    instance.queue_task(StatesContextIDMigrationTask())
+    recorder_mock.queue_task(StatesContextIDMigrationTask())
     await _async_wait_migration_done(hass)
 
     def _object_as_dict(obj):
@@ -449,7 +446,9 @@ async def test_migrate_states_context_ids(
         assert len(events) == 6
         return {state.entity_id: _object_as_dict(state) for state in events}
 
-    states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states)
+    states_by_entity_id = await recorder_mock.async_add_executor_job(
+        _fetch_migrated_states
+    )
 
     old_uuid_context_id = states_by_entity_id["state.old_uuid_context_id"]
     assert old_uuid_context_id["context_id"] is None
@@ -524,7 +523,9 @@ async def test_migrate_states_context_ids(
         == b"\n\xe2\x97\x99\xeeNOE\x81\x16\xf5\x82\xd7\xd3\xeee"
     )
 
-    migration_changes = await instance.async_add_executor_job(_get_migration_id, hass)
+    migration_changes = await recorder_mock.async_add_executor_job(
+        _get_migration_id, hass
+    )
     assert (
         migration_changes[migration.StatesContextIDMigration.migration_id]
         == migration.StatesContextIDMigration.migration_version
@@ -533,10 +534,9 @@ async def test_migrate_states_context_ids(
 
 @pytest.mark.parametrize("enable_migrate_event_type_ids", [True])
 async def test_migrate_event_type_ids(
-    async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
+    hass: HomeAssistant, recorder_mock: Recorder
 ) -> None:
     """Test we can migrate event_types to the EventTypes table."""
-    instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
     importlib.import_module(SCHEMA_MODULE)
     old_db_schema = sys.modules[SCHEMA_MODULE]
@@ -563,11 +563,11 @@ async def test_migrate_event_type_ids(
                 )
             )
 
-    await instance.async_add_executor_job(_insert_events)
+    await recorder_mock.async_add_executor_job(_insert_events)
 
     await async_wait_recording_done(hass)
     # This is a threadsafe way to add a task to the recorder
-    instance.queue_task(EventTypeIDMigrationTask())
+    recorder_mock.queue_task(EventTypeIDMigrationTask())
     await _async_wait_migration_done(hass)
 
     def _fetch_migrated_events():
@@ -599,21 +599,23 @@ async def test_migrate_event_type_ids(
                 )
         return result
 
-    events_by_type = await instance.async_add_executor_job(_fetch_migrated_events)
+    events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events)
     assert len(events_by_type["event_type_one"]) == 2
     assert len(events_by_type["event_type_two"]) == 1
 
     def _get_many():
         with session_scope(hass=hass, read_only=True) as session:
-            return instance.event_type_manager.get_many(
+            return recorder_mock.event_type_manager.get_many(
                 ("event_type_one", "event_type_two"), session
             )
 
-    mapped = await instance.async_add_executor_job(_get_many)
+    mapped = await recorder_mock.async_add_executor_job(_get_many)
     assert mapped["event_type_one"] is not None
     assert mapped["event_type_two"] is not None
 
-    migration_changes = await instance.async_add_executor_job(_get_migration_id, hass)
+    migration_changes = await recorder_mock.async_add_executor_job(
+        _get_migration_id, hass
+    )
     assert (
         migration_changes[migration.EventTypeIDMigration.migration_id]
         == migration.EventTypeIDMigration.migration_version
@@ -621,11 +623,8 @@ async def test_migrate_event_type_ids(
 
 @pytest.mark.parametrize("enable_migrate_entity_ids", [True])
-async def test_migrate_entity_ids(
-    async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
-) -> None:
+async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) -> None:
     """Test we can migrate entity_ids to the StatesMeta table."""
-    instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
     importlib.import_module(SCHEMA_MODULE)
     old_db_schema = sys.modules[SCHEMA_MODULE]
@@ -652,11 +651,11 @@ async def test_migrate_entity_ids(
                 )
             )
 
-    await instance.async_add_executor_job(_insert_states)
+    await recorder_mock.async_add_executor_job(_insert_states)
     await _async_wait_migration_done(hass)
 
     # This is a threadsafe way to add a task to the recorder
-    instance.queue_task(EntityIDMigrationTask())
+    recorder_mock.queue_task(EntityIDMigrationTask())
     await _async_wait_migration_done(hass)
 
     def _fetch_migrated_states():
@@ -683,11 +682,15 @@ async def test_migrate_entity_ids(
                 )
         return result
 
-    states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states)
+    states_by_entity_id = await recorder_mock.async_add_executor_job(
+        _fetch_migrated_states
+    )
     assert len(states_by_entity_id["sensor.two"]) == 2
     assert len(states_by_entity_id["sensor.one"]) == 1
 
-    migration_changes = await instance.async_add_executor_job(_get_migration_id, hass)
+    migration_changes = await recorder_mock.async_add_executor_job(
+        _get_migration_id, hass
+    )
     assert (
         migration_changes[migration.EntityIDMigration.migration_id]
         == migration.EntityIDMigration.migration_version
@@ -696,10 +699,9 @@ async def test_migrate_entity_ids(
 
 @pytest.mark.parametrize("enable_migrate_entity_ids", [True])
 async def test_post_migrate_entity_ids(
-    async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
+    hass: HomeAssistant, recorder_mock: Recorder
 ) -> None:
     """Test we can migrate entity_ids to the StatesMeta table."""
-    instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
     importlib.import_module(SCHEMA_MODULE)
     old_db_schema = sys.modules[SCHEMA_MODULE]
@@ -726,11 +728,11 @@ async def test_post_migrate_entity_ids(
                 )
             )
 
-    await instance.async_add_executor_job(_insert_events)
+    await recorder_mock.async_add_executor_job(_insert_events)
     await _async_wait_migration_done(hass)
 
     # This is a threadsafe way to add a task to the recorder
-    instance.queue_task(EntityIDPostMigrationTask())
+    recorder_mock.queue_task(EntityIDPostMigrationTask())
     await _async_wait_migration_done(hass)
 
     def _fetch_migrated_states():
@@ -742,7 +744,7 @@ async def test_post_migrate_entity_ids(
         assert len(states) == 3
         return {state.state: state.entity_id for state in states}
 
-    states_by_state = await instance.async_add_executor_job(_fetch_migrated_states)
+    states_by_state = await recorder_mock.async_add_executor_job(_fetch_migrated_states)
     assert states_by_state["one_1"] is None
     assert states_by_state["two_2"] is None
     assert states_by_state["two_1"] is None
@@ -750,10 +752,9 @@ async def test_post_migrate_entity_ids(
 
 @pytest.mark.parametrize("enable_migrate_entity_ids", [True])
 async def test_migrate_null_entity_ids(
-    async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
+    hass: HomeAssistant, recorder_mock: Recorder
 ) -> None:
     """Test we can migrate entity_ids to the StatesMeta table."""
-    instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
     importlib.import_module(SCHEMA_MODULE)
     old_db_schema = sys.modules[SCHEMA_MODULE]
@@ -783,11 +784,11 @@ async def test_migrate_null_entity_ids(
                 ),
             )
 
-    await instance.async_add_executor_job(_insert_states)
+    await recorder_mock.async_add_executor_job(_insert_states)
     await _async_wait_migration_done(hass)
 
     # This is a threadsafe way to add a task to the recorder
-    instance.queue_task(EntityIDMigrationTask())
+    recorder_mock.queue_task(EntityIDMigrationTask())
     await _async_wait_migration_done(hass)
 
     def _fetch_migrated_states():
@@ -814,7 +815,9 @@ async def test_migrate_null_entity_ids(
                 )
         return result
 
-    states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states)
+    states_by_entity_id = await recorder_mock.async_add_executor_job(
+        _fetch_migrated_states
+    )
     assert len(states_by_entity_id[migration._EMPTY_ENTITY_ID]) == 1000
     assert len(states_by_entity_id["sensor.one"]) == 2
@@ -822,7 +825,7 @@ async def test_migrate_null_entity_ids(
         with session_scope(hass=hass, read_only=True) as session:
             return dict(execute_stmt_lambda_element(session, get_migration_changes()))
 
-    migration_changes = await instance.async_add_executor_job(_get_migration_id)
+    migration_changes = await recorder_mock.async_add_executor_job(_get_migration_id)
     assert (
         migration_changes[migration.EntityIDMigration.migration_id]
         == migration.EntityIDMigration.migration_version
@@ -831,10 +834,9 @@ async def test_migrate_null_entity_ids(
 
 @pytest.mark.parametrize("enable_migrate_event_type_ids", [True])
 async def test_migrate_null_event_type_ids(
-    async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
+    hass: HomeAssistant, recorder_mock: Recorder
 ) -> None:
     """Test we can migrate event_types to the EventTypes table when the event_type is NULL."""
-    instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
     importlib.import_module(SCHEMA_MODULE)
     old_db_schema = sys.modules[SCHEMA_MODULE]
@@ -864,11 +866,11 @@ async def test_migrate_null_event_type_ids(
                 ),
             )
 
-    await instance.async_add_executor_job(_insert_events)
+    await recorder_mock.async_add_executor_job(_insert_events)
     await _async_wait_migration_done(hass)
 
     # This is a threadsafe way to add a task to the recorder
-    instance.queue_task(EventTypeIDMigrationTask())
+    recorder_mock.queue_task(EventTypeIDMigrationTask())
     await _async_wait_migration_done(hass)
 
     def _fetch_migrated_events():
@@ -900,7 +902,7 @@ async def test_migrate_null_event_type_ids(
                 )
         return result
 
-    events_by_type = await instance.async_add_executor_job(_fetch_migrated_events)
+    events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events)
     assert len(events_by_type["event_type_one"]) == 2
     assert len(events_by_type[migration._EMPTY_EVENT_TYPE]) == 1000
@@ -908,7 +910,7 @@ async def test_migrate_null_event_type_ids(
         with session_scope(hass=hass, read_only=True) as session:
            return dict(execute_stmt_lambda_element(session, get_migration_changes()))
 
-    migration_changes = await instance.async_add_executor_job(_get_migration_id)
+    migration_changes = await recorder_mock.async_add_executor_job(_get_migration_id)
     assert (
         migration_changes[migration.EventTypeIDMigration.migration_id]
         == migration.EventTypeIDMigration.migration_version
@@ -916,11 +918,9 @@ async def test_migrate_null_event_type_ids(
 
 async def test_stats_timestamp_conversion_is_reentrant(
-    async_setup_recorder_instance: RecorderInstanceGenerator,
-    hass: HomeAssistant,
+    hass: HomeAssistant, recorder_mock: Recorder
 ) -> None:
     """Test stats migration is reentrant."""
-    instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
     await async_attach_db_engine(hass)
 
     importlib.import_module(SCHEMA_MODULE)
@@ -932,7 +932,7 @@ async def test_stats_timestamp_conversion_is_reentrant(
 
     def _do_migration():
         migration._migrate_statistics_columns_to_timestamp_removing_duplicates(
-            hass, instance, instance.get_session, instance.engine
+            hass, recorder_mock, recorder_mock.get_session, recorder_mock.engine
         )
 
     def _insert_fake_metadata():
@@ -1070,11 +1070,9 @@ async def test_stats_timestamp_conversion_is_reentrant(
 
 async def test_stats_timestamp_with_one_by_one(
-    async_setup_recorder_instance: RecorderInstanceGenerator,
-    hass: HomeAssistant,
+    hass: HomeAssistant, recorder_mock: Recorder
 ) -> None:
     """Test stats migration with one by one."""
-    instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
     await async_attach_db_engine(hass)
 
     importlib.import_module(SCHEMA_MODULE)
@@ -1091,7 +1089,7 @@ async def test_stats_timestamp_with_one_by_one(
         side_effect=IntegrityError("test", "test", "test"),
     ):
         migration._migrate_statistics_columns_to_timestamp_removing_duplicates(
-            hass, instance, instance.get_session, instance.engine
+            hass, recorder_mock, recorder_mock.get_session, recorder_mock.engine
         )
 
     def _insert_fake_metadata():
@@ -1291,11 +1289,9 @@ async def test_stats_timestamp_with_one_by_one(
 
 async def test_stats_timestamp_with_one_by_one_removes_duplicates(
-    async_setup_recorder_instance: RecorderInstanceGenerator,
-    hass: HomeAssistant,
+    hass: HomeAssistant, recorder_mock: Recorder
 ) -> None:
     """Test stats migration with one by one removes duplicates."""
-    instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
     await async_attach_db_engine(hass)
 
     importlib.import_module(SCHEMA_MODULE)
@@ -1319,7 +1315,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates(
         ),
     ):
         migration._migrate_statistics_columns_to_timestamp_removing_duplicates(
-            hass, instance, instance.get_session, instance.engine
+            hass, recorder_mock, recorder_mock.get_session, recorder_mock.engine
         )
 
     def _insert_fake_metadata():

View File

@@ -28,6 +28,13 @@ CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine"
 SCHEMA_MODULE = "tests.components.recorder.db_schema_32"
 
 
+@pytest.fixture
+async def mock_recorder_before_hass(
+    async_test_recorder: RecorderInstanceGenerator,
+) -> None:
+    """Set up recorder."""
+
+
 async def _async_wait_migration_done(hass: HomeAssistant) -> None:
     """Wait for the migration to be done."""
     await recorder.get_instance(hass).async_block_till_done()
@@ -65,7 +72,7 @@ def _create_engine_test(*args, **kwargs):
 
 @pytest.mark.parametrize("persistent_database", [True])
 @pytest.mark.usefixtures("hass_storage")  # Prevent test hass from writing to storage
 async def test_migration_changes_prevent_trying_to_migrate_again(
-    async_setup_recorder_instance: RecorderInstanceGenerator,
+    async_test_recorder: RecorderInstanceGenerator,
 ) -> None:
     """Test that we do not try to migrate when migration_changes indicate its already migrated.
@@ -76,9 +83,7 @@ async def test_migration_changes_prevent_trying_to_migrate_again(
     3. With current schema to verify we do not have to query to see if the migration is done
     """
-    config = {
-        recorder.CONF_COMMIT_INTERVAL: 1,
-    }
+    config = {recorder.CONF_COMMIT_INTERVAL: 1}
 
     importlib.import_module(SCHEMA_MODULE)
     old_db_schema = sys.modules[SCHEMA_MODULE]
@@ -97,8 +102,10 @@ async def test_migration_changes_prevent_trying_to_migrate_again(
         patch.object(migration.EntityIDMigration, "task", core.RecorderTask),
         patch(CREATE_ENGINE_TARGET, new=_create_engine_test),
     ):
-        async with async_test_home_assistant() as hass:
-            await async_setup_recorder_instance(hass, config)
+        async with (
+            async_test_home_assistant() as hass,
+            async_test_recorder(hass, config),
+        ):
             await hass.async_block_till_done()
             await async_wait_recording_done(hass)
             await _async_wait_migration_done(hass)
@@ -107,8 +114,7 @@ async def test_migration_changes_prevent_trying_to_migrate_again(
         await hass.async_stop()
 
     # Now start again with current db schema
-    async with async_test_home_assistant() as hass:
-        await async_setup_recorder_instance(hass, config)
+    async with async_test_home_assistant() as hass, async_test_recorder(hass, config):
         await hass.async_block_till_done()
         await async_wait_recording_done(hass)
         await _async_wait_migration_done(hass)
@@ -132,19 +138,21 @@ async def test_migration_changes_prevent_trying_to_migrate_again(
             original_queue_task(self, task)
 
     # Finally verify we did not call needs_migrate_query on StatesContextIDMigration
-    async with async_test_home_assistant() as hass:
-        with (
-            patch(
-                "homeassistant.components.recorder.core.Recorder.queue_task",
-                _queue_task,
-            ),
-            patch.object(
-                migration.StatesContextIDMigration,
-                "needs_migrate_query",
-                side_effect=RuntimeError("Should not be called"),
-            ),
+    with (
+        patch(
+            "homeassistant.components.recorder.core.Recorder.queue_task",
+            _queue_task,
+        ),
+        patch.object(
+            migration.StatesContextIDMigration,
+            "needs_migrate_query",
+            side_effect=RuntimeError("Should not be called"),
+        ),
+    ):
+        async with (
+            async_test_home_assistant() as hass,
+            async_test_recorder(hass, config),
         ):
-            await async_setup_recorder_instance(hass, config)
             await hass.async_block_till_done()
             await async_wait_recording_done(hass)
             await _async_wait_migration_done(hass)
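
In the second file, which starts Home Assistant itself several times, async_test_recorder is now entered as an async context manager next to async_test_home_assistant instead of calling async_setup_recorder_instance after hass exists. A rough sketch of that pattern under the same assumptions; the helper function, config, and comments are condensed from the test above and are not part of this commit:

from homeassistant.components import recorder

from tests.common import async_test_home_assistant
from tests.components.recorder.common import async_wait_recording_done
from tests.typing import RecorderInstanceGenerator


async def _start_twice(async_test_recorder: RecorderInstanceGenerator) -> None:
    """Sketch: bring up hass plus the recorder twice, as the test does across restarts."""
    config = {recorder.CONF_COMMIT_INTERVAL: 1}

    # First run: hass and the recorder come up together and are torn down
    # together when both context managers exit.
    async with async_test_home_assistant() as hass, async_test_recorder(hass, config):
        await async_wait_recording_done(hass)
        await hass.async_stop()

    # Second run: the parenthesized form is the same pattern, used in the test
    # when extra patches wrap the block.
    async with (
        async_test_home_assistant() as hass,
        async_test_recorder(hass, config),
    ):
        await async_wait_recording_done(hass)
        await hass.async_stop()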