Mirror of https://github.com/home-assistant/core.git (synced 2025-07-08 22:07:10 +00:00)
Correct fallback to state in state machine when processing statistics (#140396)

parent ffa6f42c0e
commit 427aa55789
@@ -134,16 +134,7 @@ def _time_weighted_average(
         duration = end - old_start_time
         accumulated += old_fstate * duration.total_seconds()
 
-    period_seconds = (end - start).total_seconds()
-    if period_seconds == 0:
-        # If the only state changed that happened was at the exact moment
-        # at the end of the period, we can't calculate a meaningful average
-        # so we return 0.0 since it represents a time duration smaller than
-        # we can measure. This probably means the precision of statistics
-        # column schema in the database is incorrect but it is actually possible
-        # to happen if the state change event fired at the exact microsecond
-        return 0.0
-    return accumulated / period_seconds
+    return accumulated / (end - start).total_seconds()
 
 
 def _get_units(fstates: list[tuple[float, State]]) -> set[str | None]:
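The hunk above drops the zero-length-period special case from _time_weighted_average; judging from the removed comment and the fallback change in the next hunk, a state change sitting exactly at the end of a period is now filtered out before this function is called, so the plain division is enough. For reference, below is a minimal, self-contained sketch of the time-weighted average being computed. It is not the Home Assistant implementation: the function name time_weighted_average, the (value, changed_at) input shape and the timestamps in the example are invented for illustration, and the sketch assumes a non-empty input list.

from datetime import datetime, timedelta, timezone


def time_weighted_average(
    fstates: list[tuple[float, datetime]], start: datetime, end: datetime
) -> float:
    """Average the values, weighting each by how long it was held in [start, end)."""
    accumulated = 0.0
    # Clamp the first change to the period start so earlier history is not overweighted.
    old_fstate, old_start_time = fstates[0][0], max(fstates[0][1], start)
    for fstate, changed_at in fstates[1:]:
        accumulated += old_fstate * (changed_at - old_start_time).total_seconds()
        old_fstate, old_start_time = fstate, changed_at
    # The last value is held until the end of the period.
    accumulated += old_fstate * (end - old_start_time).total_seconds()
    return accumulated / (end - start).total_seconds()


start = datetime(2025, 3, 12, 12, 0, tzinfo=timezone.utc)
end = start + timedelta(minutes=5)
# 10 held for the first 3 minutes, 30 for the last 2: (10*180 + 30*120) / 300 = 18.0
assert time_weighted_average(
    [(10.0, start), (30.0, start + timedelta(minutes=3))], start, end
) == 18.0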
@@ -447,7 +438,11 @@ def compile_statistics( # noqa: C901
         entity_id = _state.entity_id
         # If there are no recent state changes, the sensor's state may already be pruned
         # from the recorder. Get the state from the state machine instead.
-        if not (entity_history := history_list.get(entity_id, [_state])):
+        try:
+            entity_history = history_list[entity_id]
+        except KeyError:
+            entity_history = [_state] if _state.last_changed < end else []
+        if not entity_history:
             continue
         if not (float_states := _entity_history_to_float_and_state(entity_history)):
             continue
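The behavioural change is in the fallback: the sensor's live state is now used only when the recorder has no history for the entity and that state was last changed before the end of the period being compiled; a state newer than the period end is ignored. A minimal, hypothetical sketch of just this selection logic follows (FakeState, select_entity_history and the timestamps are invented stand-ins, not Home Assistant APIs).

from datetime import datetime, timedelta, timezone


class FakeState:
    """Invented stand-in for a state object; only the fields used here."""

    def __init__(self, entity_id: str, last_changed: datetime) -> None:
        self.entity_id = entity_id
        self.last_changed = last_changed


def select_entity_history(
    history_list: dict[str, list[FakeState]],
    current_state: FakeState,
    end: datetime,
) -> list[FakeState]:
    """Mirror the selection logic from the hunk above.

    Prefer recorded history; otherwise fall back to the state machine, but
    only if that state was last changed before the end of the statistics
    period. A newer state belongs to a later period and is skipped.
    """
    try:
        return history_list[current_state.entity_id]
    except KeyError:
        return [current_state] if current_state.last_changed < end else []


end = datetime(2025, 3, 12, 12, 5, tzinfo=timezone.utc)
old_state = FakeState("sensor.test1", end - timedelta(hours=2))
future_state = FakeState("sensor.test1", end + timedelta(seconds=1))

# History purged, state older than the period end: fall back to it.
assert select_entity_history({}, old_state, end) == [old_state]
# History purged, state newer than the period end: compile nothing for the entity.
assert select_entity_history({}, future_state, end) == []
# History present: always use it, regardless of the live state's age.
assert select_entity_history(
    {"sensor.test1": [old_state]}, future_state, end
) == [old_state]

The new test test_compile_hourly_statistics_ignore_future_state added at the end of this diff exercises exactly the second branch.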
@@ -541,11 +541,11 @@ async def test_compile_hourly_statistics_with_all_same_last_updated(
         "max",
     ),
     [
-        ("temperature", "°C", "°C", "°C", "temperature", 0, 60, 60),
-        ("temperature", "°F", "°F", "°F", "temperature", 0, 60, 60),
+        ("temperature", "°C", "°C", "°C", "temperature", 60, -10, 60),
+        ("temperature", "°F", "°F", "°F", "temperature", 60, -10, 60),
     ],
 )
-async def test_compile_hourly_statistics_only_state_is_and_end_of_period(
+async def test_compile_hourly_statistics_only_state_is_at_end_of_period(
     hass: HomeAssistant,
     caplog: pytest.LogCaptureFixture,
     device_class,
@@ -557,7 +557,7 @@ async def test_compile_hourly_statistics_only_state_is_and_end_of_period(
     min,
     max,
 ) -> None:
-    """Test compiling hourly statistics when the only state at end of period."""
+    """Test compiling hourly statistics when the only states are at end of period."""
     zero = get_start_time(dt_util.utcnow())
     await async_setup_component(hass, "sensor", {})
     # Wait for the sensor recorder platform to be added
@@ -604,6 +604,7 @@ async def test_compile_hourly_statistics_only_state_is_and_end_of_period(
     assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
 
     do_adhoc_statistics(hass, start=zero)
+    do_adhoc_statistics(hass, start=zero + timedelta(minutes=5))
     await async_wait_recording_done(hass)
     statistic_ids = await async_list_statistic_ids(hass)
     assert statistic_ids == [
@@ -622,8 +623,8 @@ async def test_compile_hourly_statistics_only_state_is_and_end_of_period(
     assert stats == {
         "sensor.test1": [
             {
-                "start": process_timestamp(zero).timestamp(),
-                "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
+                "start": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
+                "end": process_timestamp(zero + timedelta(minutes=10)).timestamp(),
                 "mean": pytest.approx(mean),
                 "min": pytest.approx(min),
                 "max": pytest.approx(max),
@@ -651,7 +652,10 @@ async def test_compile_hourly_statistics_purged_state_changes(
     statistics_unit,
     unit_class,
 ) -> None:
-    """Test compiling hourly statistics."""
+    """Test compiling hourly statistics.
+
+    This tests statistics falls back to the state machine when states are purged.
+    """
     zero = get_start_time(dt_util.utcnow())
     await async_setup_component(hass, "sensor", {})
     # Wait for the sensor recorder platform to be added
@@ -716,6 +720,93 @@ async def test_compile_hourly_statistics_purged_state_changes(
     assert "Error while processing event StatisticsTask" not in caplog.text
 
 
+@pytest.mark.parametrize(
+    (
+        "device_class",
+        "state_unit",
+        "display_unit",
+        "statistics_unit",
+        "unit_class",
+        "mean",
+        "min",
+        "max",
+    ),
+    [
+        (None, "%", "%", "%", "unitless", 13.050847, -10, 30),
+    ],
+)
+async def test_compile_hourly_statistics_ignore_future_state(
+    hass: HomeAssistant,
+    caplog: pytest.LogCaptureFixture,
+    device_class,
+    state_unit,
+    display_unit,
+    statistics_unit,
+    unit_class,
+    mean,
+    min,
+    max,
+) -> None:
+    """Test compiling hourly statistics.
+
+    This tests statistics does not fall back to the state machine if the state
+    in the state machine is newer than the end of the statistics period.
+    """
+    zero = get_start_time(dt_util.utcnow() + timedelta(minutes=5))
+    previous_period = zero - timedelta(minutes=5)
+    await async_setup_component(hass, "sensor", {})
+    # Wait for the sensor recorder platform to be added
+    await async_recorder_block_till_done(hass)
+    attributes = {
+        "device_class": device_class,
+        "state_class": "measurement",
+        "unit_of_measurement": state_unit,
+    }
+    with freeze_time(zero) as freezer:
+        four, states = await async_record_states(
+            hass, freezer, zero, "sensor.test1", attributes
+        )
+    await async_wait_recording_done(hass)
+    hist = history.get_significant_states(
+        hass, zero, four, hass.states.async_entity_ids()
+    )
+    assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
+
+    do_adhoc_statistics(hass, start=previous_period)
+    do_adhoc_statistics(hass, start=zero)
+    await async_wait_recording_done(hass)
+    statistic_ids = await async_list_statistic_ids(hass)
+    assert statistic_ids == [
+        {
+            "statistic_id": "sensor.test1",
+            "display_unit_of_measurement": display_unit,
+            "has_mean": True,
+            "has_sum": False,
+            "name": None,
+            "source": "recorder",
+            "statistics_unit_of_measurement": statistics_unit,
+            "unit_class": unit_class,
+        }
+    ]
+    stats = statistics_during_period(hass, previous_period, period="5minute")
+    # Check we get no stats from the previous period
+    assert stats == {
+        "sensor.test1": [
+            {
+                "start": process_timestamp(zero).timestamp(),
+                "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
+                "mean": pytest.approx(mean),
+                "min": pytest.approx(min),
+                "max": pytest.approx(max),
+                "last_reset": None,
+                "state": None,
+                "sum": None,
+            }
+        ]
+    }
+    assert "Error while processing event StatisticsTask" not in caplog.text
+
+
 @pytest.mark.parametrize("attributes", [TEMPERATURE_SENSOR_ATTRIBUTES])
 async def test_compile_hourly_statistics_wrong_unit(
     hass: HomeAssistant,