Correct fallback to state in state machine when processing statistics (#140396)

Erik Montnemery 2025-03-13 09:28:15 +01:00 committed by GitHub
parent ffa6f42c0e
commit 427aa55789
2 changed files with 104 additions and 18 deletions
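
The core of the fix is the fallback in compile_statistics: when an entity has no recorded history for the period, the compiler now falls back to the state machine's current state only if that state last changed before the end of the period. The following is a minimal, standalone sketch of that decision rule; FakeState and pick_entity_history are invented stand-ins for illustration, and only the try/except logic mirrors the diff below.

```python
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone


@dataclass
class FakeState:
    """Stand-in for a recorded state; only the fields the fallback needs."""

    entity_id: str
    last_changed: datetime


def pick_entity_history(
    history_list: dict[str, list[FakeState]],
    _state: FakeState,
    end: datetime,
) -> list[FakeState]:
    """Choose the states used to compile statistics for one entity.

    Prefer the recorded history. If it was purged, fall back to the state
    machine's state, but only when it last changed before the end of the
    period; a newer state belongs to a later statistics period.
    """
    try:
        return history_list[_state.entity_id]
    except KeyError:
        return [_state] if _state.last_changed < end else []


period_end = datetime(2025, 3, 13, 9, 0, tzinfo=timezone.utc)
older = FakeState("sensor.test1", period_end - timedelta(minutes=3))
newer = FakeState("sensor.test1", period_end + timedelta(minutes=3))

print(pick_entity_history({}, older, period_end))  # -> [older]: fallback applies
print(pick_entity_history({}, newer, period_end))  # -> []: entity is skipped
```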


@@ -134,16 +134,7 @@ def _time_weighted_average(
         duration = end - old_start_time
         accumulated += old_fstate * duration.total_seconds()
-    period_seconds = (end - start).total_seconds()
-    if period_seconds == 0:
-        # If the only state changed that happened was at the exact moment
-        # at the end of the period, we can't calculate a meaningful average
-        # so we return 0.0 since it represents a time duration smaller than
-        # we can measure. This probably means the precision of statistics
-        # column schema in the database is incorrect but it is actually possible
-        # to happen if the state change event fired at the exact microsecond
-        return 0.0
-    return accumulated / period_seconds
+    return accumulated / (end - start).total_seconds()
 def _get_units(fstates: list[tuple[float, State]]) -> set[str | None]:
@@ -447,7 +438,11 @@ def compile_statistics(  # noqa: C901
         entity_id = _state.entity_id
         # If there are no recent state changes, the sensor's state may already be pruned
         # from the recorder. Get the state from the state machine instead.
-        if not (entity_history := history_list.get(entity_id, [_state])):
+        try:
+            entity_history = history_list[entity_id]
+        except KeyError:
+            entity_history = [_state] if _state.last_changed < end else []
+        if not entity_history:
             continue
         if not (float_states := _entity_history_to_float_and_state(entity_history)):
             continue
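
The guard removed in the first hunk handled the case where the only state change fell exactly at the period's end, which collapsed the averaging window to zero seconds; with the corrected fallback such a state is never handed to the averaging code, so the guard can go. The sketch below illustrates the arithmetic under simplified assumptions: plain float seconds instead of datetimes, and time_weighted_average as an invented stand-in, not the actual helper.

```python
def time_weighted_average(
    samples: list[tuple[float, float]], start: float, end: float
) -> float:
    """Average (timestamp, value) samples over [start, end].

    Each value is weighted by how long it was held. If there is no state
    from before the period, the window effectively starts at the first sample.
    """
    accumulated = 0.0
    prev_value: float | None = None
    prev_time = start
    for timestamp, value in samples:
        timestamp = max(timestamp, start)
        if prev_value is None:
            start = timestamp  # no earlier state: shrink the window
        else:
            accumulated += prev_value * (timestamp - prev_time)
        prev_value, prev_time = value, timestamp
    if prev_value is not None:
        accumulated += prev_value * (end - prev_time)
    return accumulated / (end - start)


# 10 held for 30 s, then 20 held for 30 s, over a 60 s window -> 15.0
print(time_weighted_average([(0.0, 10.0), (30.0, 20.0)], 0.0, 60.0))

# With the old fallback the only sample could sit exactly at `end`:
# time_weighted_average([(60.0, 42.0)], 0.0, 60.0) shrinks the window to
# zero seconds and would divide by zero -- the case the removed `return 0.0`
# guard worked around. The corrected fallback never injects such a sample.
```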


@@ -541,11 +541,11 @@ async def test_compile_hourly_statistics_with_all_same_last_updated(
"max",
),
[
("temperature", "°C", "°C", "°C", "temperature", 0, 60, 60),
("temperature", "°F", "°F", "°F", "temperature", 0, 60, 60),
("temperature", "°C", "°C", "°C", "temperature", 60, -10, 60),
("temperature", "°F", "°F", "°F", "temperature", 60, -10, 60),
],
)
async def test_compile_hourly_statistics_only_state_is_and_end_of_period(
async def test_compile_hourly_statistics_only_state_is_at_end_of_period(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
device_class,
@@ -557,7 +557,7 @@ async def test_compile_hourly_statistics_only_state_is_and_end_of_period(
min,
max,
) -> None:
"""Test compiling hourly statistics when the only state at end of period."""
"""Test compiling hourly statistics when the only states are at end of period."""
zero = get_start_time(dt_util.utcnow())
await async_setup_component(hass, "sensor", {})
# Wait for the sensor recorder platform to be added
@@ -604,6 +604,7 @@ async def test_compile_hourly_statistics_only_state_is_and_end_of_period(
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=zero)
do_adhoc_statistics(hass, start=zero + timedelta(minutes=5))
await async_wait_recording_done(hass)
statistic_ids = await async_list_statistic_ids(hass)
assert statistic_ids == [
@@ -622,8 +623,8 @@ async def test_compile_hourly_statistics_only_state_is_and_end_of_period(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"start": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=10)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@@ -651,7 +652,10 @@ async def test_compile_hourly_statistics_purged_state_changes(
statistics_unit,
unit_class,
) -> None:
"""Test compiling hourly statistics."""
"""Test compiling hourly statistics.
This tests statistics falls back to the state machine when states are purged.
"""
zero = get_start_time(dt_util.utcnow())
await async_setup_component(hass, "sensor", {})
# Wait for the sensor recorder platform to be added
@@ -716,6 +720,93 @@ async def test_compile_hourly_statistics_purged_state_changes(
assert "Error while processing event StatisticsTask" not in caplog.text
@pytest.mark.parametrize(
(
"device_class",
"state_unit",
"display_unit",
"statistics_unit",
"unit_class",
"mean",
"min",
"max",
),
[
(None, "%", "%", "%", "unitless", 13.050847, -10, 30),
],
)
async def test_compile_hourly_statistics_ignore_future_state(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
device_class,
state_unit,
display_unit,
statistics_unit,
unit_class,
mean,
min,
max,
) -> None:
"""Test compiling hourly statistics.
This tests statistics does not fall back to the state machine if the state
in the state machine is newer than the end of the statistics period.
"""
zero = get_start_time(dt_util.utcnow() + timedelta(minutes=5))
previous_period = zero - timedelta(minutes=5)
await async_setup_component(hass, "sensor", {})
# Wait for the sensor recorder platform to be added
await async_recorder_block_till_done(hass)
attributes = {
"device_class": device_class,
"state_class": "measurement",
"unit_of_measurement": state_unit,
}
with freeze_time(zero) as freezer:
four, states = await async_record_states(
hass, freezer, zero, "sensor.test1", attributes
)
await async_wait_recording_done(hass)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=previous_period)
do_adhoc_statistics(hass, start=zero)
await async_wait_recording_done(hass)
statistic_ids = await async_list_statistic_ids(hass)
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
]
stats = statistics_during_period(hass, previous_period, period="5minute")
# Check we get no stats from the previous period
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
}
]
}
assert "Error while processing event StatisticsTask" not in caplog.text
@pytest.mark.parametrize("attributes", [TEMPERATURE_SENSOR_ATTRIBUTES])
async def test_compile_hourly_statistics_wrong_unit(
hass: HomeAssistant,