Include data for entire periods in response to recorder/statistics_during_period (#92848)

* Include data for entire periods in response to recorder/statistics_during_period

* Break out function for calculating the end of the month
Erik Montnemery 2023-05-11 11:28:33 +02:00 committed by GitHub
parent 67d5ca4894
commit c1f716487c
2 changed files with 341 additions and 44 deletions
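
The behavioural core of the change: when period is "day", "week" or "month", the requested start_time/end_time window is widened to whole period boundaries before the database query, so the first and last buckets come back complete rather than truncated. A minimal pure-datetime sketch of the "day" case (an illustration, not part of the diff; it leaves out the dt_util.as_local timezone handling the real code uses):

from datetime import datetime, timedelta

def align_day(start: datetime, end: datetime) -> tuple[datetime, datetime]:
    # Floor the start to midnight, and push the end to the midnight after
    # the day it falls in, so both edge days are covered in full.
    start = start.replace(hour=0, minute=0, second=0, microsecond=0)
    end = end.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)
    return start, end

# A query from 08:00 to 22:00 on 2022-10-05 expands to the whole day.
assert align_day(datetime(2022, 10, 5, 8), datetime(2022, 10, 5, 22)) == (
    datetime(2022, 10, 5),
    datetime(2022, 10, 6),
)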


@@ -981,6 +981,14 @@ def _reduce_statistics_per_week(
)
def _find_month_end_time(timestamp: datetime) -> datetime:
"""Return the end of the month (midnight at the first day of the next month)."""
# We add 4 days to the end to make sure we are in the next month
return (timestamp.replace(day=28) + timedelta(days=4)).replace(
day=1, hour=0, minute=0, second=0, microsecond=0
)
def reduce_month_ts_factory() -> (
tuple[
Callable[[float, float], bool],
@@ -1007,10 +1015,7 @@ def reduce_month_ts_factory() -> (
start_local = _local_from_timestamp(time).replace(
day=1, hour=0, minute=0, second=0, microsecond=0
)
# We add 4 days to the end to make sure we are in the next month
end_local = (start_local.replace(day=28) + timedelta(days=4)).replace(
day=1, hour=0, minute=0, second=0, microsecond=0
)
end_local = _find_month_end_time(start_local)
return (
start_local.astimezone(dt_util.UTC).timestamp(),
end_local.astimezone(dt_util.UTC).timestamp(),
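
A standalone sketch of the day-28-plus-4-days trick used by _find_month_end_time (illustration only, not part of the diff): day 28 exists in every month, so adding four days is guaranteed to land in the following month, which is then truncated back to its first midnight.

from datetime import datetime, timedelta

def month_end(ts: datetime) -> datetime:
    # Jump safely into the next month, then truncate to its first midnight.
    return (ts.replace(day=28) + timedelta(days=4)).replace(
        day=1, hour=0, minute=0, second=0, microsecond=0
    )

assert month_end(datetime(2021, 2, 14)) == datetime(2021, 3, 1)    # 28-day month
assert month_end(datetime(2024, 2, 14)) == datetime(2024, 3, 1)    # leap February
assert month_end(datetime(2021, 9, 30)) == datetime(2021, 10, 1)   # 30-day month
assert month_end(datetime(2021, 12, 31)) == datetime(2022, 1, 1)   # year rollover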
@@ -1629,6 +1634,36 @@ def _statistics_during_period_with_session(
if statistic_ids is not None:
metadata_ids = _extract_metadata_and_discard_impossible_columns(metadata, types)
# Align start_time and end_time with the period
if period == "day":
start_time = dt_util.as_local(start_time).replace(
hour=0, minute=0, second=0, microsecond=0
)
if end_time is not None:
end_local = dt_util.as_local(end_time)
end_time = end_local.replace(
hour=0, minute=0, second=0, microsecond=0
) + timedelta(days=1)
elif period == "week":
start_local = dt_util.as_local(start_time)
start_time = start_local.replace(
hour=0, minute=0, second=0, microsecond=0
) - timedelta(days=start_local.weekday())
if end_time is not None:
end_local = dt_util.as_local(end_time)
end_time = (
end_local.replace(hour=0, minute=0, second=0, microsecond=0)
- timedelta(days=end_local.weekday())
+ timedelta(days=7)
)
elif period == "month":
start_time = dt_util.as_local(start_time).replace(
day=1, hour=0, minute=0, second=0, microsecond=0
)
if end_time is not None:
end_time = _find_month_end_time(dt_util.as_local(end_time))
table: type[Statistics | StatisticsShortTerm] = (
Statistics if period != "5minute" else StatisticsShortTerm
)
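
The "week" branch above snaps both ends of the window to Monday-midnight boundaries. A pure-datetime sketch of the same arithmetic (again leaving out the local-timezone conversion):

from datetime import datetime, timedelta

def align_week(start: datetime, end: datetime) -> tuple[datetime, datetime]:
    # Weeks start on Monday: roll the start back to the most recent Monday
    # midnight, and roll the end forward past the week it falls in, so
    # partially covered weeks are returned in full.
    start = start.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(
        days=start.weekday()
    )
    end = (
        end.replace(hour=0, minute=0, second=0, microsecond=0)
        - timedelta(days=end.weekday())
        + timedelta(days=7)
    )
    return start, end

# 2022-10-05 is a Wednesday; the query expands to Mon 2022-10-03 .. Mon 2022-10-10.
assert align_week(datetime(2022, 10, 5, 8), datetime(2022, 10, 5, 20)) == (
    datetime(2022, 10, 3),
    datetime(2022, 10, 10),
)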


@@ -961,7 +961,7 @@ def test_import_statistics_errors(
@pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"])
@pytest.mark.freeze_time("2022-10-01 00:00:00+00:00")
def test_daily_statistics(
def test_daily_statistics_sum(
hass_recorder: Callable[..., HomeAssistant],
caplog: pytest.LogCaptureFixture,
timezone,
@@ -979,6 +979,8 @@ def test_daily_statistics(
period2 = dt_util.as_utc(dt_util.parse_datetime("2022-10-03 23:00:00"))
period3 = dt_util.as_utc(dt_util.parse_datetime("2022-10-04 00:00:00"))
period4 = dt_util.as_utc(dt_util.parse_datetime("2022-10-04 23:00:00"))
period5 = dt_util.as_utc(dt_util.parse_datetime("2022-10-05 00:00:00"))
period6 = dt_util.as_utc(dt_util.parse_datetime("2022-10-05 23:00:00"))
external_statistics = (
{
@@ -1005,6 +1007,18 @@ def test_daily_statistics(
"state": 3,
"sum": 5,
},
{
"start": period5,
"last_reset": None,
"state": 4,
"sum": 6,
},
{
"start": period6,
"last_reset": None,
"state": 5,
"sum": 7,
},
)
external_metadata = {
"has_mean": False,
@@ -1024,6 +1038,8 @@ def test_daily_statistics(
day1_end = dt_util.as_utc(dt_util.parse_datetime("2022-10-04 00:00:00"))
day2_start = dt_util.as_utc(dt_util.parse_datetime("2022-10-04 00:00:00"))
day2_end = dt_util.as_utc(dt_util.parse_datetime("2022-10-05 00:00:00"))
day3_start = dt_util.as_utc(dt_util.parse_datetime("2022-10-05 00:00:00"))
day3_end = dt_util.as_utc(dt_util.parse_datetime("2022-10-06 00:00:00"))
expected_stats = {
"test:total_energy_import": [
{
@@ -1040,6 +1056,13 @@ def test_daily_statistics(
"state": 3.0,
"sum": 5.0,
},
{
"start": day3_start.timestamp(),
"end": day3_end.timestamp(),
"last_reset": None,
"state": 5.0,
"sum": 7.0,
},
]
}
assert stats == expected_stats
@@ -1064,6 +1087,11 @@ def test_daily_statistics(
"end": day2_end.timestamp(),
"change": 2.0,
},
{
"start": day3_start.timestamp(),
"end": day3_end.timestamp(),
"change": 2.0,
},
]
}
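
For context on the expected "change" values: assuming change is the growth of sum within each bucket (the last sum in the bucket minus the last sum before it), the newly added third day follows the same pattern as the second:

day1_change = 3 - 0  # no earlier data, so the full sum counts -> 3.0
day2_change = 5 - 3  # -> 2.0
day3_change = 7 - 5  # -> 2.0 (the newly added bucket)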
@@ -1076,6 +1104,16 @@ def test_daily_statistics(
)
assert stats == expected_stats
# Get data with end during the third period
stats = statistics_during_period(
hass,
start_time=zero,
end_time=period6 - timedelta(hours=1),
statistic_ids={"test:total_energy_import"},
period="day",
)
assert stats == expected_stats
# Try to get data for entities which do not exist
stats = statistics_during_period(
hass,
@@ -1106,7 +1144,7 @@ def test_daily_statistics(
@pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"])
@pytest.mark.freeze_time("2022-10-01 00:00:00+00:00")
def test_weekly_statistics(
def test_weekly_statistics_mean(
hass_recorder: Callable[..., HomeAssistant],
caplog: pytest.LogCaptureFixture,
timezone,
@@ -1121,7 +1159,7 @@ def test_weekly_statistics(
zero = dt_util.utcnow()
period1 = dt_util.as_utc(dt_util.parse_datetime("2022-10-03 00:00:00"))
period2 = dt_util.as_utc(dt_util.parse_datetime("2022-10-09 23:00:00"))
period2 = dt_util.as_utc(dt_util.parse_datetime("2022-10-05 23:00:00"))
period3 = dt_util.as_utc(dt_util.parse_datetime("2022-10-10 00:00:00"))
period4 = dt_util.as_utc(dt_util.parse_datetime("2022-10-16 23:00:00"))
@@ -1129,31 +1167,35 @@ def test_weekly_statistics(
{
"start": period1,
"last_reset": None,
"state": 0,
"sum": 2,
"max": 0,
"mean": 10,
"min": -100,
},
{
"start": period2,
"last_reset": None,
"state": 1,
"sum": 3,
"max": 10,
"mean": 20,
"min": -90,
},
{
"start": period3,
"last_reset": None,
"state": 2,
"sum": 4,
"max": 20,
"mean": 30,
"min": -80,
},
{
"start": period4,
"last_reset": None,
"state": 3,
"sum": 5,
"max": 30,
"mean": 40,
"min": -70,
},
)
external_metadata = {
"has_mean": False,
"has_sum": True,
"has_mean": True,
"has_sum": False,
"name": "Total imported energy",
"source": "test",
"statistic_id": "test:total_energy_import",
@@ -1162,6 +1204,7 @@ def test_weekly_statistics(
async_add_external_statistics(hass, external_metadata, external_statistics)
wait_recording_done(hass)
# Get all data
stats = statistics_during_period(
hass, zero, period="week", statistic_ids={"test:total_energy_import"}
)
@@ -1175,42 +1218,30 @@ def test_weekly_statistics(
"start": week1_start.timestamp(),
"end": week1_end.timestamp(),
"last_reset": None,
"state": 1.0,
"sum": 3.0,
"max": 10,
"mean": 15,
"min": -100,
},
{
"start": week2_start.timestamp(),
"end": week2_end.timestamp(),
"last_reset": None,
"state": 3.0,
"sum": 5.0,
"max": 30,
"mean": 35,
"min": -80,
},
]
}
assert stats == expected_stats
# Get change
# Get data starting with start of the first period
stats = statistics_during_period(
hass,
start_time=period1,
statistic_ids={"test:total_energy_import"},
period="week",
types={"change"},
)
assert stats == {
"test:total_energy_import": [
{
"start": week1_start.timestamp(),
"end": week1_end.timestamp(),
"change": 3.0,
},
{
"start": week2_start.timestamp(),
"end": week2_end.timestamp(),
"change": 2.0,
},
]
}
assert stats == expected_stats
# Get data with start during the first period
stats = statistics_during_period(
@@ -1250,13 +1281,13 @@ def test_weekly_statistics(
@pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"])
@pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
def test_monthly_statistics(
@pytest.mark.freeze_time("2022-10-01 00:00:00+00:00")
def test_weekly_statistics_sum(
hass_recorder: Callable[..., HomeAssistant],
caplog: pytest.LogCaptureFixture,
timezone,
) -> None:
"""Test monthly statistics."""
"""Test weekly statistics."""
dt_util.set_default_time_zone(dt_util.get_time_zone(timezone))
hass = hass_recorder()
@@ -1265,10 +1296,12 @@ def test_monthly_statistics(
assert "Statistics already compiled" not in caplog.text
zero = dt_util.utcnow()
period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))
period1 = dt_util.as_utc(dt_util.parse_datetime("2022-10-03 00:00:00"))
period2 = dt_util.as_utc(dt_util.parse_datetime("2022-10-09 23:00:00"))
period3 = dt_util.as_utc(dt_util.parse_datetime("2022-10-10 00:00:00"))
period4 = dt_util.as_utc(dt_util.parse_datetime("2022-10-16 23:00:00"))
period5 = dt_util.as_utc(dt_util.parse_datetime("2022-10-17 00:00:00"))
period6 = dt_util.as_utc(dt_util.parse_datetime("2022-10-23 23:00:00"))
external_statistics = (
{
@@ -1295,6 +1328,201 @@ def test_monthly_statistics(
"state": 3,
"sum": 5,
},
{
"start": period5,
"last_reset": None,
"state": 4,
"sum": 6,
},
{
"start": period6,
"last_reset": None,
"state": 5,
"sum": 7,
},
)
external_metadata = {
"has_mean": False,
"has_sum": True,
"name": "Total imported energy",
"source": "test",
"statistic_id": "test:total_energy_import",
"unit_of_measurement": "kWh",
}
async_add_external_statistics(hass, external_metadata, external_statistics)
wait_recording_done(hass)
stats = statistics_during_period(
hass, zero, period="week", statistic_ids={"test:total_energy_import"}
)
week1_start = dt_util.as_utc(dt_util.parse_datetime("2022-10-03 00:00:00"))
week1_end = dt_util.as_utc(dt_util.parse_datetime("2022-10-10 00:00:00"))
week2_start = dt_util.as_utc(dt_util.parse_datetime("2022-10-10 00:00:00"))
week2_end = dt_util.as_utc(dt_util.parse_datetime("2022-10-17 00:00:00"))
week3_start = dt_util.as_utc(dt_util.parse_datetime("2022-10-17 00:00:00"))
week3_end = dt_util.as_utc(dt_util.parse_datetime("2022-10-24 00:00:00"))
expected_stats = {
"test:total_energy_import": [
{
"start": week1_start.timestamp(),
"end": week1_end.timestamp(),
"last_reset": None,
"state": 1.0,
"sum": 3.0,
},
{
"start": week2_start.timestamp(),
"end": week2_end.timestamp(),
"last_reset": None,
"state": 3.0,
"sum": 5.0,
},
{
"start": week3_start.timestamp(),
"end": week3_end.timestamp(),
"last_reset": None,
"state": 5.0,
"sum": 7.0,
},
]
}
assert stats == expected_stats
# Get change
stats = statistics_during_period(
hass,
start_time=period1,
statistic_ids={"test:total_energy_import"},
period="week",
types={"change"},
)
assert stats == {
"test:total_energy_import": [
{
"start": week1_start.timestamp(),
"end": week1_end.timestamp(),
"change": 3.0,
},
{
"start": week2_start.timestamp(),
"end": week2_end.timestamp(),
"change": 2.0,
},
{
"start": week3_start.timestamp(),
"end": week3_end.timestamp(),
"change": 2.0,
},
]
}
# Get data with start during the first period
stats = statistics_during_period(
hass,
start_time=period1 + timedelta(days=1),
statistic_ids={"test:total_energy_import"},
period="week",
)
assert stats == expected_stats
# Get data with end during the third period
stats = statistics_during_period(
hass,
start_time=zero,
end_time=period6 - timedelta(days=1),
statistic_ids={"test:total_energy_import"},
period="week",
)
assert stats == expected_stats
# Try to get data for entities which do not exist
stats = statistics_during_period(
hass,
start_time=zero,
statistic_ids={"not", "the", "same", "test:total_energy_import"},
period="week",
)
assert stats == expected_stats
# Use 5minute to ensure table switch works
stats = statistics_during_period(
hass,
start_time=zero,
statistic_ids=["test:total_energy_import", "with_other"],
period="5minute",
)
assert stats == {}
# Ensure future date has no data
future = dt_util.as_utc(dt_util.parse_datetime("2221-11-01 00:00:00"))
stats = statistics_during_period(
hass, start_time=future, end_time=future, period="week"
)
assert stats == {}
dt_util.set_default_time_zone(dt_util.get_time_zone("UTC"))
@pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"])
@pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
def test_monthly_statistics_sum(
hass_recorder: Callable[..., HomeAssistant],
caplog: pytest.LogCaptureFixture,
timezone,
) -> None:
"""Test monthly statistics."""
dt_util.set_default_time_zone(dt_util.get_time_zone(timezone))
hass = hass_recorder()
wait_recording_done(hass)
assert "Compiling statistics for" not in caplog.text
assert "Statistics already compiled" not in caplog.text
zero = dt_util.utcnow()
period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))
period5 = dt_util.as_utc(dt_util.parse_datetime("2021-11-01 00:00:00"))
period6 = dt_util.as_utc(dt_util.parse_datetime("2021-11-30 23:00:00"))
external_statistics = (
{
"start": period1,
"last_reset": None,
"state": 0,
"sum": 2,
},
{
"start": period2,
"last_reset": None,
"state": 1,
"sum": 3,
},
{
"start": period3,
"last_reset": None,
"state": 2,
"sum": 4,
},
{
"start": period4,
"last_reset": None,
"state": 3,
"sum": 5,
},
{
"start": period5,
"last_reset": None,
"state": 4,
"sum": 6,
},
{
"start": period6,
"last_reset": None,
"state": 5,
"sum": 7,
},
)
external_metadata = {
"has_mean": False,
@@ -1314,6 +1542,8 @@ def test_monthly_statistics(
sep_end = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
oct_start = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
oct_end = dt_util.as_utc(dt_util.parse_datetime("2021-11-01 00:00:00"))
nov_start = dt_util.as_utc(dt_util.parse_datetime("2021-11-01 00:00:00"))
nov_end = dt_util.as_utc(dt_util.parse_datetime("2021-12-01 00:00:00"))
expected_stats = {
"test:total_energy_import": [
{
@@ -1330,6 +1560,13 @@ def test_monthly_statistics(
"state": pytest.approx(3.0),
"sum": pytest.approx(5.0),
},
{
"start": nov_start.timestamp(),
"end": nov_end.timestamp(),
"last_reset": None,
"state": 5.0,
"sum": 7.0,
},
]
}
assert stats == expected_stats
@@ -1354,6 +1591,11 @@ def test_monthly_statistics(
"end": oct_end.timestamp(),
"change": 2.0,
},
{
"start": nov_start.timestamp(),
"end": nov_end.timestamp(),
"change": 2.0,
},
]
}
# Get data with start during the first period
@@ -1365,6 +1607,16 @@ def test_monthly_statistics(
)
assert stats == expected_stats
# Get data with end during the third period
stats = statistics_during_period(
hass,
start_time=zero,
end_time=period6 - timedelta(days=1),
statistic_ids={"test:total_energy_import"},
period="month",
)
assert stats == expected_stats
# Try to get data for entities which do not exist
stats = statistics_during_period(
hass,
@@ -1394,6 +1646,11 @@ def test_monthly_statistics(
"end": oct_end.timestamp(),
"sum": pytest.approx(5.0),
},
{
"start": nov_start.timestamp(),
"end": nov_end.timestamp(),
"sum": pytest.approx(7.0),
},
]
}
@@ -1418,6 +1675,11 @@ def test_monthly_statistics(
"end": oct_end.timestamp(),
"sum": pytest.approx(5000.0),
},
{
"start": nov_start.timestamp(),
"end": nov_end.timestamp(),
"sum": pytest.approx(7000.0),
},
]
}