Convert recorder statistics tests to use async API (#116925)

Erik Montnemery authored on 2024-05-06 15:10:58 +02:00 (committed by GitHub)
parent d01d161fe2
commit 9807b2ec11

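The diff below converts the recorder statistics tests from the synchronous test helpers to the async API. As a minimal sketch of the recurring pattern (the test name `test_something` is a placeholder; the helper and fixture names are the ones that appear in the diff):

# Before: synchronous style, driven through the hass_recorder fixture
def test_something(hass_recorder: Callable[..., HomeAssistant]) -> None:
    hass = hass_recorder()
    setup_component(hass, "sensor", {})
    do_adhoc_statistics(hass, start=dt_util.utcnow())
    wait_recording_done(hass)

# After: async style, recorder provided by the setup_recorder fixture
async def test_something(hass: HomeAssistant, setup_recorder: None) -> None:
    await async_setup_component(hass, "sensor", {})
    do_adhoc_statistics(hass, start=dt_util.utcnow())
    await async_wait_recording_done(hass)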

@@ -1,6 +1,5 @@
 """The tests for sensor recorder platform."""
-from collections.abc import Callable
 from datetime import timedelta
 from unittest.mock import patch
@@ -33,22 +32,33 @@ from homeassistant.components.recorder.table_managers.statistics_meta import (
 )
 from homeassistant.components.recorder.util import session_scope
 from homeassistant.components.sensor import UNIT_CONVERTERS
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.setup import setup_component
+from homeassistant.helpers import entity_registry as er
+from homeassistant.setup import async_setup_component
 import homeassistant.util.dt as dt_util
 
 from .common import (
     assert_dict_of_states_equal_without_context_and_last_changed,
+    async_record_states,
     async_wait_recording_done,
     do_adhoc_statistics,
-    record_states,
     statistics_during_period,
-    wait_recording_done,
 )
 
-from tests.common import mock_registry
-from tests.typing import WebSocketGenerator
+from tests.typing import RecorderInstanceGenerator, WebSocketGenerator
+
+
+@pytest.fixture
+async def mock_recorder_before_hass(
+    async_setup_recorder_instance: RecorderInstanceGenerator,
+) -> None:
+    """Set up recorder."""
+
+
+@pytest.fixture
+def setup_recorder(recorder_mock: Recorder) -> None:
+    """Set up recorder."""
 
 
 def test_converters_align_with_sensor() -> None:
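The two new fixtures above have empty bodies; their effect comes from what they request: `mock_recorder_before_hass` requests `async_setup_recorder_instance`, and `setup_recorder` requests `recorder_mock`, so a converted test only needs to list `setup_recorder` in its signature to run against a live recorder. A minimal sketch of how such a test consumes it (the test name and body are illustrative, not part of this change):

async def test_example(hass: HomeAssistant, setup_recorder: None) -> None:
    """The recorder is already running; no hass_recorder() call is needed."""
    do_adhoc_statistics(hass, start=dt_util.utcnow())
    await async_wait_recording_done(hass)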
@@ -60,12 +70,14 @@ def test_converters_align_with_sensor() -> None:
         assert converter in UNIT_CONVERTERS.values()
 
 
-def test_compile_hourly_statistics(hass_recorder: Callable[..., HomeAssistant]) -> None:
+async def test_compile_hourly_statistics(
+    hass: HomeAssistant,
+    setup_recorder: None,
+) -> None:
     """Test compiling hourly statistics."""
-    hass = hass_recorder()
     instance = recorder.get_instance(hass)
-    setup_component(hass, "sensor", {})
-    zero, four, states = record_states(hass)
+    await async_setup_component(hass, "sensor", {})
+    zero, four, states = await async_record_states(hass)
     hist = history.get_significant_states(hass, zero, four, list(states))
     assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@@ -93,7 +105,7 @@ def test_compile_hourly_statistics(hass_recorder: Callable[..., HomeAssistant])
     do_adhoc_statistics(hass, start=zero)
     do_adhoc_statistics(hass, start=four)
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
 
     metadata = get_metadata(hass, statistic_ids={"sensor.test1", "sensor.test2"})
     assert metadata["sensor.test1"][1]["has_mean"] is True
@@ -320,18 +332,16 @@ def mock_from_stats():
     yield
 
 
-def test_compile_periodic_statistics_exception(
-    hass_recorder: Callable[..., HomeAssistant], mock_sensor_statistics, mock_from_stats
+async def test_compile_periodic_statistics_exception(
+    hass: HomeAssistant, setup_recorder: None, mock_sensor_statistics, mock_from_stats
 ) -> None:
     """Test exception handling when compiling periodic statistics."""
-
-    hass = hass_recorder()
-    setup_component(hass, "sensor", {})
+    await async_setup_component(hass, "sensor", {})
 
     now = dt_util.utcnow()
     do_adhoc_statistics(hass, start=now)
     do_adhoc_statistics(hass, start=now + timedelta(minutes=5))
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     expected_1 = {
         "start": process_timestamp(now).timestamp(),
         "end": process_timestamp(now + timedelta(minutes=5)).timestamp(),
@@ -364,27 +374,22 @@ def test_compile_periodic_statistics_exception(
     }
 
 
-def test_rename_entity(hass_recorder: Callable[..., HomeAssistant]) -> None:
+async def test_rename_entity(
+    hass: HomeAssistant, entity_registry: er.EntityRegistry, setup_recorder: None
+) -> None:
     """Test statistics is migrated when entity_id is changed."""
-    hass = hass_recorder()
-    setup_component(hass, "sensor", {})
+    await async_setup_component(hass, "sensor", {})
 
-    entity_reg = mock_registry(hass)
+    reg_entry = entity_registry.async_get_or_create(
+        "sensor",
+        "test",
+        "unique_0000",
+        suggested_object_id="test1",
+    )
+    assert reg_entry.entity_id == "sensor.test1"
+    await hass.async_block_till_done()
 
-    @callback
-    def add_entry():
-        reg_entry = entity_reg.async_get_or_create(
-            "sensor",
-            "test",
-            "unique_0000",
-            suggested_object_id="test1",
-        )
-        assert reg_entry.entity_id == "sensor.test1"
-
-    hass.add_job(add_entry)
-    hass.block_till_done()
-
-    zero, four, states = record_states(hass)
+    zero, four, states = await async_record_states(hass)
     hist = history.get_significant_states(hass, zero, four, list(states))
     assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@@ -401,7 +406,7 @@ def test_rename_entity(hass_recorder: Callable[..., HomeAssistant]) -> None:
     assert stats == {}
 
     do_adhoc_statistics(hass, start=zero)
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     expected_1 = {
         "start": process_timestamp(zero).timestamp(),
         "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
@@ -419,23 +424,19 @@ def test_rename_entity(hass_recorder: Callable[..., HomeAssistant]) -> None:
     stats = statistics_during_period(hass, zero, period="5minute")
     assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
 
-    @callback
-    def rename_entry():
-        entity_reg.async_update_entity("sensor.test1", new_entity_id="sensor.test99")
-
-    hass.add_job(rename_entry)
-    wait_recording_done(hass)
+    entity_registry.async_update_entity("sensor.test1", new_entity_id="sensor.test99")
+    await async_wait_recording_done(hass)
 
     stats = statistics_during_period(hass, zero, period="5minute")
     assert stats == {"sensor.test99": expected_stats99, "sensor.test2": expected_stats2}
 
 
-def test_statistics_during_period_set_back_compat(
-    hass_recorder: Callable[..., HomeAssistant],
+async def test_statistics_during_period_set_back_compat(
+    hass: HomeAssistant,
+    setup_recorder: None,
 ) -> None:
     """Test statistics_during_period can handle a list instead of a set."""
-    hass = hass_recorder()
-    setup_component(hass, "sensor", {})
+    await async_setup_component(hass, "sensor", {})
     # This should not throw an exception when passed a list instead of a set
     assert (
         statistics.statistics_during_period(
@@ -451,33 +452,29 @@ def test_statistics_during_period_set_back_compat(
     )
 
 
-def test_rename_entity_collision(
-    hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
+async def test_rename_entity_collision(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+    setup_recorder: None,
+    caplog: pytest.LogCaptureFixture,
 ) -> None:
     """Test statistics is migrated when entity_id is changed.
 
     This test relies on the safeguard in the statistics_meta_manager
     and should not hit the filter_unique_constraint_integrity_error safeguard.
     """
-    hass = hass_recorder()
-    setup_component(hass, "sensor", {})
+    await async_setup_component(hass, "sensor", {})
 
-    entity_reg = mock_registry(hass)
+    reg_entry = entity_registry.async_get_or_create(
+        "sensor",
+        "test",
+        "unique_0000",
+        suggested_object_id="test1",
+    )
+    assert reg_entry.entity_id == "sensor.test1"
+    await hass.async_block_till_done()
 
-    @callback
-    def add_entry():
-        reg_entry = entity_reg.async_get_or_create(
-            "sensor",
-            "test",
-            "unique_0000",
-            suggested_object_id="test1",
-        )
-        assert reg_entry.entity_id == "sensor.test1"
-
-    hass.add_job(add_entry)
-    hass.block_till_done()
-
-    zero, four, states = record_states(hass)
+    zero, four, states = await async_record_states(hass)
     hist = history.get_significant_states(hass, zero, four, list(states))
     assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@@ -494,7 +491,7 @@ def test_rename_entity_collision(
     assert stats == {}
 
     do_adhoc_statistics(hass, start=zero)
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     expected_1 = {
         "start": process_timestamp(zero).timestamp(),
         "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
@@ -525,12 +522,8 @@ def test_rename_entity_collision(
         session.add(recorder.db_schema.StatisticsMeta.from_meta(metadata_1))
 
     # Rename entity sensor.test1 to sensor.test99
-    @callback
-    def rename_entry():
-        entity_reg.async_update_entity("sensor.test1", new_entity_id="sensor.test99")
-
-    hass.add_job(rename_entry)
-    wait_recording_done(hass)
+    entity_registry.async_update_entity("sensor.test1", new_entity_id="sensor.test99")
+    await async_wait_recording_done(hass)
 
     # Statistics failed to migrate due to the collision
     stats = statistics_during_period(hass, zero, period="5minute")
@@ -546,33 +539,29 @@ def test_rename_entity_collision(
     assert "Blocked attempt to insert duplicated statistic rows" not in caplog.text
 
 
-def test_rename_entity_collision_states_meta_check_disabled(
-    hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
+async def test_rename_entity_collision_states_meta_check_disabled(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+    setup_recorder: None,
+    caplog: pytest.LogCaptureFixture,
 ) -> None:
     """Test statistics is migrated when entity_id is changed.
 
     This test disables the safeguard in the statistics_meta_manager
     and relies on the filter_unique_constraint_integrity_error safeguard.
     """
-    hass = hass_recorder()
-    setup_component(hass, "sensor", {})
+    await async_setup_component(hass, "sensor", {})
 
-    entity_reg = mock_registry(hass)
+    reg_entry = entity_registry.async_get_or_create(
+        "sensor",
+        "test",
+        "unique_0000",
+        suggested_object_id="test1",
+    )
+    assert reg_entry.entity_id == "sensor.test1"
+    await hass.async_block_till_done()
 
-    @callback
-    def add_entry():
-        reg_entry = entity_reg.async_get_or_create(
-            "sensor",
-            "test",
-            "unique_0000",
-            suggested_object_id="test1",
-        )
-        assert reg_entry.entity_id == "sensor.test1"
-
-    hass.add_job(add_entry)
-    hass.block_till_done()
-
-    zero, four, states = record_states(hass)
+    zero, four, states = await async_record_states(hass)
     hist = history.get_significant_states(hass, zero, four, list(states))
     assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@@ -589,7 +578,7 @@ def test_rename_entity_collision_states_meta_check_disabled(
     assert stats == {}
 
     do_adhoc_statistics(hass, start=zero)
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     expected_1 = {
         "start": process_timestamp(zero).timestamp(),
         "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
@@ -624,14 +613,10 @@ def test_rename_entity_collision_states_meta_check_disabled(
     # so that we hit the filter_unique_constraint_integrity_error safeguard in the statistics
     with patch.object(instance.statistics_meta_manager, "get", return_value=None):
         # Rename entity sensor.test1 to sensor.test99
-        @callback
-        def rename_entry():
-            entity_reg.async_update_entity(
-                "sensor.test1", new_entity_id="sensor.test99"
-            )
-
-        hass.add_job(rename_entry)
-        wait_recording_done(hass)
+        entity_registry.async_update_entity(
+            "sensor.test1", new_entity_id="sensor.test99"
+        )
+        await async_wait_recording_done(hass)
 
     # Statistics failed to migrate due to the collision
     stats = statistics_during_period(hass, zero, period="5minute")
@@ -647,17 +632,16 @@ def test_rename_entity_collision_states_meta_check_disabled(
     ) not in caplog.text
 
 
-def test_statistics_duplicated(
-    hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
+async def test_statistics_duplicated(
+    hass: HomeAssistant, setup_recorder: None, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test statistics with same start time is not compiled."""
-    hass = hass_recorder()
-    setup_component(hass, "sensor", {})
-    zero, four, states = record_states(hass)
+    await async_setup_component(hass, "sensor", {})
+    zero, four, states = await async_record_states(hass)
     hist = history.get_significant_states(hass, zero, four, list(states))
     assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
 
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     assert "Compiling statistics for" not in caplog.text
     assert "Statistics already compiled" not in caplog.text
@@ -666,7 +650,7 @@ def test_statistics_duplicated(
         return_value=statistics.PlatformCompiledStatistics([], {}),
     ) as compile_statistics:
         do_adhoc_statistics(hass, start=zero)
-        wait_recording_done(hass)
+        await async_wait_recording_done(hass)
         assert compile_statistics.called
         compile_statistics.reset_mock()
         assert "Compiling statistics for" in caplog.text
@@ -674,7 +658,7 @@ def test_statistics_duplicated(
         caplog.clear()
 
         do_adhoc_statistics(hass, start=zero)
-        wait_recording_done(hass)
+        await async_wait_recording_done(hass)
         assert not compile_statistics.called
         compile_statistics.reset_mock()
         assert "Compiling statistics for" not in caplog.text
@@ -933,12 +917,11 @@ async def test_import_statistics(
     }
 
 
-def test_external_statistics_errors(
-    hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
+async def test_external_statistics_errors(
+    hass: HomeAssistant, setup_recorder: None, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test validation of external statistics."""
-    hass = hass_recorder()
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
 
     assert "Compiling statistics for" not in caplog.text
     assert "Statistics already compiled" not in caplog.text
@@ -970,7 +953,7 @@ def test_external_statistics_errors(
     external_statistics = {**_external_statistics}
     with pytest.raises(HomeAssistantError):
         async_add_external_statistics(hass, external_metadata, (external_statistics,))
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     assert statistics_during_period(hass, zero, period="hour") == {}
     assert list_statistic_ids(hass) == []
     assert get_metadata(hass, statistic_ids={"sensor.total_energy_import"}) == {}
@@ -980,7 +963,7 @@ def test_external_statistics_errors(
     external_statistics = {**_external_statistics}
     with pytest.raises(HomeAssistantError):
         async_add_external_statistics(hass, external_metadata, (external_statistics,))
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     assert statistics_during_period(hass, zero, period="hour") == {}
     assert list_statistic_ids(hass) == []
     assert get_metadata(hass, statistic_ids={"test:total_energy_import"}) == {}
@@ -993,7 +976,7 @@ def test_external_statistics_errors(
     }
     with pytest.raises(HomeAssistantError):
         async_add_external_statistics(hass, external_metadata, (external_statistics,))
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     assert statistics_during_period(hass, zero, period="hour") == {}
     assert list_statistic_ids(hass) == []
     assert get_metadata(hass, statistic_ids={"test:total_energy_import"}) == {}
@@ -1003,7 +986,7 @@ def test_external_statistics_errors(
     external_statistics = {**_external_statistics, "start": period1.replace(minute=1)}
     with pytest.raises(HomeAssistantError):
         async_add_external_statistics(hass, external_metadata, (external_statistics,))
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     assert statistics_during_period(hass, zero, period="hour") == {}
     assert list_statistic_ids(hass) == []
     assert get_metadata(hass, statistic_ids={"test:total_energy_import"}) == {}
@@ -1016,18 +999,17 @@ def test_external_statistics_errors(
     }
     with pytest.raises(HomeAssistantError):
         async_add_external_statistics(hass, external_metadata, (external_statistics,))
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     assert statistics_during_period(hass, zero, period="hour") == {}
     assert list_statistic_ids(hass) == []
     assert get_metadata(hass, statistic_ids={"test:total_energy_import"}) == {}
 
 
-def test_import_statistics_errors(
-    hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
+async def test_import_statistics_errors(
+    hass: HomeAssistant, setup_recorder: None, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test validation of imported statistics."""
-    hass = hass_recorder()
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
 
     assert "Compiling statistics for" not in caplog.text
     assert "Statistics already compiled" not in caplog.text
@@ -1059,7 +1041,7 @@ def test_import_statistics_errors(
     external_statistics = {**_external_statistics}
     with pytest.raises(HomeAssistantError):
         async_import_statistics(hass, external_metadata, (external_statistics,))
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     assert statistics_during_period(hass, zero, period="hour") == {}
     assert list_statistic_ids(hass) == []
     assert get_metadata(hass, statistic_ids={"test:total_energy_import"}) == {}
@@ -1069,7 +1051,7 @@ def test_import_statistics_errors(
     external_statistics = {**_external_statistics}
     with pytest.raises(HomeAssistantError):
         async_import_statistics(hass, external_metadata, (external_statistics,))
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     assert statistics_during_period(hass, zero, period="hour") == {}
     assert list_statistic_ids(hass) == []
     assert get_metadata(hass, statistic_ids={"sensor.total_energy_import"}) == {}
@@ -1082,7 +1064,7 @@ def test_import_statistics_errors(
     }
     with pytest.raises(HomeAssistantError):
         async_import_statistics(hass, external_metadata, (external_statistics,))
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     assert statistics_during_period(hass, zero, period="hour") == {}
     assert list_statistic_ids(hass) == []
     assert get_metadata(hass, statistic_ids={"sensor.total_energy_import"}) == {}
@@ -1092,7 +1074,7 @@ def test_import_statistics_errors(
     external_statistics = {**_external_statistics, "start": period1.replace(minute=1)}
    with pytest.raises(HomeAssistantError):
         async_import_statistics(hass, external_metadata, (external_statistics,))
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     assert statistics_during_period(hass, zero, period="hour") == {}
     assert list_statistic_ids(hass) == []
     assert get_metadata(hass, statistic_ids={"sensor.total_energy_import"}) == {}
@@ -1105,7 +1087,7 @@ def test_import_statistics_errors(
     }
     with pytest.raises(HomeAssistantError):
         async_import_statistics(hass, external_metadata, (external_statistics,))
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     assert statistics_during_period(hass, zero, period="hour") == {}
     assert list_statistic_ids(hass) == []
     assert get_metadata(hass, statistic_ids={"sensor.total_energy_import"}) == {}
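The timezone-parametrized tests that follow also change how the timezone reaches the test: instead of being passed to the hass_recorder(timezone=...) factory, it is now applied to the running instance. A minimal sketch of the new shape (the test name and body are illustrative only):

@pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"])
async def test_example_tz(
    hass: HomeAssistant, setup_recorder: None, timezone
) -> None:
    hass.config.set_time_zone(timezone)
    await async_wait_recording_done(hass)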
@@ -1113,14 +1095,15 @@ def test_import_statistics_errors(
 
 @pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"])
 @pytest.mark.freeze_time("2022-10-01 00:00:00+00:00")
-def test_daily_statistics_sum(
-    hass_recorder: Callable[..., HomeAssistant],
+async def test_daily_statistics_sum(
+    hass: HomeAssistant,
+    setup_recorder: None,
     caplog: pytest.LogCaptureFixture,
     timezone,
 ) -> None:
     """Test daily statistics."""
-    hass = hass_recorder(timezone=timezone)
-    wait_recording_done(hass)
+    hass.config.set_time_zone(timezone)
+    await async_wait_recording_done(hass)
 
     assert "Compiling statistics for" not in caplog.text
     assert "Statistics already compiled" not in caplog.text
@@ -1180,7 +1163,7 @@ def test_daily_statistics_sum(
     }
 
     async_add_external_statistics(hass, external_metadata, external_statistics)
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     stats = statistics_during_period(
         hass, zero, period="day", statistic_ids={"test:total_energy_import"}
     )
@@ -1292,14 +1275,15 @@ def test_daily_statistics_sum(
 
 @pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"])
 @pytest.mark.freeze_time("2022-10-01 00:00:00+00:00")
-def test_weekly_statistics_mean(
-    hass_recorder: Callable[..., HomeAssistant],
+async def test_weekly_statistics_mean(
+    hass: HomeAssistant,
+    setup_recorder: None,
     caplog: pytest.LogCaptureFixture,
     timezone,
 ) -> None:
     """Test weekly statistics."""
-    hass = hass_recorder(timezone=timezone)
-    wait_recording_done(hass)
+    hass.config.set_time_zone(timezone)
+    await async_wait_recording_done(hass)
 
     assert "Compiling statistics for" not in caplog.text
     assert "Statistics already compiled" not in caplog.text
@@ -1349,7 +1333,7 @@ def test_weekly_statistics_mean(
     }
 
     async_add_external_statistics(hass, external_metadata, external_statistics)
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     # Get all data
     stats = statistics_during_period(
         hass, zero, period="week", statistic_ids={"test:total_energy_import"}
@@ -1426,14 +1410,15 @@ def test_weekly_statistics_mean(
 
 @pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"])
 @pytest.mark.freeze_time("2022-10-01 00:00:00+00:00")
-def test_weekly_statistics_sum(
-    hass_recorder: Callable[..., HomeAssistant],
+async def test_weekly_statistics_sum(
+    hass: HomeAssistant,
+    setup_recorder: None,
     caplog: pytest.LogCaptureFixture,
     timezone,
 ) -> None:
     """Test weekly statistics."""
-    hass = hass_recorder(timezone=timezone)
-    wait_recording_done(hass)
+    hass.config.set_time_zone(timezone)
+    await async_wait_recording_done(hass)
 
     assert "Compiling statistics for" not in caplog.text
     assert "Statistics already compiled" not in caplog.text
@@ -1493,7 +1478,7 @@ def test_weekly_statistics_sum(
     }
 
     async_add_external_statistics(hass, external_metadata, external_statistics)
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     stats = statistics_during_period(
         hass, zero, period="week", statistic_ids={"test:total_energy_import"}
     )
@@ -1605,14 +1590,15 @@ def test_weekly_statistics_sum(
 
 @pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"])
 @pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
-def test_monthly_statistics_sum(
-    hass_recorder: Callable[..., HomeAssistant],
+async def test_monthly_statistics_sum(
+    hass: HomeAssistant,
+    setup_recorder: None,
     caplog: pytest.LogCaptureFixture,
     timezone,
 ) -> None:
     """Test monthly statistics."""
-    hass = hass_recorder(timezone=timezone)
-    wait_recording_done(hass)
+    hass.config.set_time_zone(timezone)
+    await async_wait_recording_done(hass)
 
     assert "Compiling statistics for" not in caplog.text
     assert "Statistics already compiled" not in caplog.text
@@ -1672,7 +1658,7 @@ def test_monthly_statistics_sum(
     }
 
     async_add_external_statistics(hass, external_metadata, external_statistics)
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     stats = statistics_during_period(
         hass, zero, period="month", statistic_ids={"test:total_energy_import"}
     )
@@ -1924,14 +1910,15 @@ def test_cache_key_for_generate_statistics_at_time_stmt() -> None:
 
 @pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"])
 @pytest.mark.freeze_time("2022-10-01 00:00:00+00:00")
-def test_change(
-    hass_recorder: Callable[..., HomeAssistant],
+async def test_change(
+    hass: HomeAssistant,
+    setup_recorder: None,
     caplog: pytest.LogCaptureFixture,
     timezone,
 ) -> None:
     """Test deriving change from sum statistic."""
-    hass = hass_recorder(timezone=timezone)
-    wait_recording_done(hass)
+    hass.config.set_time_zone(timezone)
+    await async_wait_recording_done(hass)
 
     assert "Compiling statistics for" not in caplog.text
     assert "Statistics already compiled" not in caplog.text
@@ -1977,7 +1964,7 @@ def test_change(
     }
 
     async_import_statistics(hass, external_metadata, external_statistics)
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     # Get change from far in the past
     stats = statistics_during_period(
         hass,
@@ -2258,8 +2245,9 @@ def test_change(
 
 @pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"])
 @pytest.mark.freeze_time("2022-10-01 00:00:00+00:00")
-def test_change_with_none(
-    hass_recorder: Callable[..., HomeAssistant],
+async def test_change_with_none(
+    hass: HomeAssistant,
+    setup_recorder: None,
     caplog: pytest.LogCaptureFixture,
     timezone,
 ) -> None:
@@ -2268,8 +2256,8 @@ def test_change_with_none(
     This tests the behavior when some record has None sum. The calculated change
     is not expected to be correct, but we should not raise on this error.
     """
-    hass = hass_recorder(timezone=timezone)
-    wait_recording_done(hass)
+    hass.config.set_time_zone(timezone)
+    await async_wait_recording_done(hass)
 
     assert "Compiling statistics for" not in caplog.text
     assert "Statistics already compiled" not in caplog.text
@@ -2315,7 +2303,7 @@ def test_change_with_none(
     }
 
     async_add_external_statistics(hass, external_metadata, external_statistics)
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     # Get change from far in the past
     stats = statistics_during_period(
         hass,