Optimize fetching statistics by avoiding timestamp conversions (#87845)

* Optimize fetching statistics by avoiding timestamp conversions

* remove accidental unrelated change

* fix test

* recreate so we handle timezone changes
J. Nick Koston 2023-02-10 10:26:46 -06:00 committed by GitHub
parent 6a1cd75a67
commit c35661947a
8 changed files with 346 additions and 321 deletions
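
The core of the change: keep "start", "end" and "last_reset" as float epoch timestamps all the way through the aggregation pipeline, converting to datetime only at the serialization edge. A minimal sketch of the pattern (illustrative values, not the exact Home Assistant code; dt_util.utc_from_timestamp is the real helper used in the diffs below and behaves like the stdlib call shown here):

    from datetime import datetime, timezone

    # Before: every row paid for an up-front timestamp -> datetime conversion.
    # row = {"start": datetime.fromtimestamp(start_ts, tz=timezone.utc), ...}

    # After: rows stay as cheap floats and period math is plain float arithmetic.
    row = {"start": 1675987200.0, "end": 1675987200.0 + 3600.0}

    # Convert once, only when a consumer actually needs an ISO string.
    iso = datetime.fromtimestamp(row["start"], tz=timezone.utc).isoformat()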

View File

@ -4,7 +4,7 @@ from __future__ import annotations
import asyncio
from collections import defaultdict
from collections.abc import Awaitable, Callable
from datetime import datetime, timedelta
from datetime import timedelta
import functools
from itertools import chain
from types import ModuleType
@ -278,9 +278,9 @@ async def ws_get_fossil_energy_consumption(
def _combine_sum_statistics(
stats: dict[str, list[dict[str, Any]]], statistic_ids: list[str]
) -> dict[datetime, float]:
) -> dict[float, float]:
"""Combine multiple statistics, returns a dict indexed by start time."""
result: defaultdict[datetime, float] = defaultdict(float)
result: defaultdict[float, float] = defaultdict(float)
for statistics_id, stat in stats.items():
if statistics_id not in statistic_ids:
@ -292,9 +292,9 @@ async def ws_get_fossil_energy_consumption(
return {key: result[key] for key in sorted(result)}
def _calculate_deltas(sums: dict[datetime, float]) -> dict[datetime, float]:
def _calculate_deltas(sums: dict[float, float]) -> dict[float, float]:
prev: float | None = None
result: dict[datetime, float] = {}
result: dict[float, float] = {}
for period, sum_ in sums.items():
if prev is not None:
result[period] = sum_ - prev
@ -303,8 +303,8 @@ async def ws_get_fossil_energy_consumption(
def _reduce_deltas(
stat_list: list[dict[str, Any]],
same_period: Callable[[datetime, datetime], bool],
period_start_end: Callable[[datetime], tuple[datetime, datetime]],
same_period: Callable[[float, float], bool],
period_start_end: Callable[[float], tuple[float, float]],
period: timedelta,
) -> list[dict[str, Any]]:
"""Reduce hourly deltas to daily or monthly deltas."""
@ -316,14 +316,14 @@ async def ws_get_fossil_energy_consumption(
# Loop over the hourly deltas + a fake entry to end the period
for statistic in chain(
stat_list, ({"start": stat_list[-1]["start"] + period},)
stat_list, ({"start": stat_list[-1]["start"] + period.total_seconds()},)
):
if not same_period(prev_stat["start"], statistic["start"]):
start, _ = period_start_end(prev_stat["start"])
# The previous statistic was the last entry of the period
result.append(
{
"start": start.isoformat(),
"start": dt_util.utc_from_timestamp(start).isoformat(),
"delta": sum(deltas),
}
)
@ -351,24 +351,30 @@ async def ws_get_fossil_energy_consumption(
if msg["period"] == "hour":
reduced_fossil_energy = [
{"start": period["start"].isoformat(), "delta": period["delta"]}
{
"start": dt_util.utc_from_timestamp(period["start"]).isoformat(),
"delta": period["delta"],
}
for period in fossil_energy
]
elif msg["period"] == "day":
_same_day, _day_start_end = recorder.statistics.reduce_day_factory()
_same_day_ts, _day_start_end_ts = recorder.statistics.reduce_day_ts_factory()
reduced_fossil_energy = _reduce_deltas(
fossil_energy,
_same_day,
_day_start_end,
_same_day_ts,
_day_start_end_ts,
timedelta(days=1),
)
else:
_same_month, _month_start_end = recorder.statistics.reduce_month_factory()
(
_same_month_ts,
_month_start_end_ts,
) = recorder.statistics.reduce_month_ts_factory()
reduced_fossil_energy = _reduce_deltas(
fossil_energy,
_same_month,
_month_start_end,
_same_month_ts,
_month_start_end_ts,
timedelta(days=1),
)
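
Note the arithmetic change in the fake end-of-period entry above: with float starts, a timedelta can no longer be added directly, so the code switches to period.total_seconds(). A small illustration (values made up):

    from datetime import timedelta

    last_start = 1675987200.0  # epoch seconds of the last hourly delta
    period = timedelta(days=1)
    fake_start = last_start + period.total_seconds()  # 1676073600.0
    # last_start + period would raise TypeError: float + timedelta is unsupported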

View File

@ -1028,13 +1028,14 @@ def list_statistic_ids(
def _reduce_statistics(
stats: dict[str, list[dict[str, Any]]],
same_period: Callable[[datetime, datetime], bool],
period_start_end: Callable[[datetime], tuple[datetime, datetime]],
same_period: Callable[[float, float], bool],
period_start_end: Callable[[float], tuple[float, float]],
period: timedelta,
types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> dict[str, list[dict[str, Any]]]:
"""Reduce hourly statistics to daily or monthly statistics."""
result: dict[str, list[dict[str, Any]]] = defaultdict(list)
period_seconds = period.total_seconds()
for statistic_id, stat_list in stats.items():
max_values: list[float] = []
mean_values: list[float] = []
@ -1043,7 +1044,7 @@ def _reduce_statistics(
# Loop over the hourly statistics + a fake entry to end the period
for statistic in chain(
stat_list, ({"start": stat_list[-1]["start"] + period},)
stat_list, ({"start": stat_list[-1]["start"] + period_seconds},)
):
if not same_period(prev_stat["start"], statistic["start"]):
start, end = period_start_end(prev_stat["start"])
@ -1080,35 +1081,39 @@ def _reduce_statistics(
return result
def reduce_day_factory() -> (
def reduce_day_ts_factory() -> (
tuple[
Callable[[datetime, datetime], bool],
Callable[[datetime], tuple[datetime, datetime]],
Callable[[float, float], bool],
Callable[[float], tuple[float, float]],
]
):
"""Return functions to match same day and day start end."""
# We have to recreate _local_from_timestamp in the closure in case the timezone changes
_local_from_timestamp = partial(
datetime.fromtimestamp, tz=dt_util.DEFAULT_TIME_ZONE
)
# We create _as_local_cached in the closure in case the timezone changes
_as_local_cached = lru_cache(maxsize=6)(dt_util.as_local)
_as_local_cached = lru_cache(maxsize=6)(_local_from_timestamp)
def _as_local_date(time: datetime) -> date:
def _as_local_date(time: float) -> date:
"""Return the local date of a datetime."""
return dt_util.as_local(time).date()
return _local_from_timestamp(time).date()
_as_local_date_cached = lru_cache(maxsize=6)(_as_local_date)
def _same_day(time1: datetime, time2: datetime) -> bool:
def _same_day_ts(time1: float, time2: float) -> bool:
"""Return True if time1 and time2 are in the same date."""
return _as_local_date_cached(time1) == _as_local_date_cached(time2)
def _day_start_end(time: datetime) -> tuple[datetime, datetime]:
def _day_start_end_ts(time: float) -> tuple[float, float]:
"""Return the start and end of the period (day) time is within."""
start = dt_util.as_utc(
_as_local_cached(time).replace(hour=0, minute=0, second=0, microsecond=0)
)
end = start + timedelta(days=1)
return (start, end)
return (start.timestamp(), end.timestamp())
return _same_day, _day_start_end
return _same_day_ts, _day_start_end_ts
def _reduce_statistics_per_day(
@ -1116,37 +1121,41 @@ def _reduce_statistics_per_day(
types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> dict[str, list[dict[str, Any]]]:
"""Reduce hourly statistics to daily statistics."""
_same_day, _day_start_end = reduce_day_factory()
_same_day_ts, _day_start_end_ts = reduce_day_ts_factory()
return _reduce_statistics(
stats, _same_day, _day_start_end, timedelta(days=1), types
stats, _same_day_ts, _day_start_end_ts, timedelta(days=1), types
)
def reduce_week_factory() -> (
def reduce_week_ts_factory() -> (
tuple[
Callable[[datetime, datetime], bool],
Callable[[datetime], tuple[datetime, datetime]],
Callable[[float, float], bool],
Callable[[float], tuple[float, float]],
]
):
"""Return functions to match same week and week start end."""
# We have to recreate _local_from_timestamp in the closure in case the timezone changes
_local_from_timestamp = partial(
datetime.fromtimestamp, tz=dt_util.DEFAULT_TIME_ZONE
)
# We create _as_local_cached in the closure in case the timezone changes
_as_local_cached = lru_cache(maxsize=6)(dt_util.as_local)
_as_local_cached = lru_cache(maxsize=6)(_local_from_timestamp)
def _as_local_isocalendar(
time: datetime,
time: float,
) -> tuple: # Need python3.11 for isocalendar typing
"""Return the local isocalendar of a datetime."""
return dt_util.as_local(time).isocalendar()
return _local_from_timestamp(time).isocalendar()
_as_local_isocalendar_cached = lru_cache(maxsize=6)(_as_local_isocalendar)
def _same_week(time1: datetime, time2: datetime) -> bool:
def _same_week_ts(time1: float, time2: float) -> bool:
"""Return True if time1 and time2 are in the same year and week."""
date1 = _as_local_isocalendar_cached(time1)
date2 = _as_local_isocalendar_cached(time2)
return (date1.year, date1.week) == (date2.year, date2.week) # type: ignore[attr-defined]
def _week_start_end(time: datetime) -> tuple[datetime, datetime]:
def _week_start_end_ts(time: float) -> tuple[float, float]:
"""Return the start and end of the period (week) time is within."""
time_local = _as_local_cached(time)
start_local = time_local.replace(
@ -1154,9 +1163,9 @@ def reduce_week_factory() -> (
) - timedelta(days=time_local.weekday())
start = dt_util.as_utc(start_local)
end = dt_util.as_utc(start_local + timedelta(days=7))
return (start, end)
return (start.timestamp(), end.timestamp())
return _same_week, _week_start_end
return _same_week_ts, _week_start_end_ts
def _reduce_statistics_per_week(
@ -1164,34 +1173,33 @@ def _reduce_statistics_per_week(
types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> dict[str, list[dict[str, Any]]]:
"""Reduce hourly statistics to weekly statistics."""
_same_week, _week_start_end = reduce_week_factory()
_same_week_ts, _week_start_end_ts = reduce_week_ts_factory()
return _reduce_statistics(
stats, _same_week, _week_start_end, timedelta(days=7), types
stats, _same_week_ts, _week_start_end_ts, timedelta(days=7), types
)
def reduce_month_factory() -> (
def reduce_month_ts_factory() -> (
tuple[
Callable[[datetime, datetime], bool],
Callable[[datetime], tuple[datetime, datetime]],
Callable[[float, float], bool],
Callable[[float], tuple[float, float]],
]
):
"""Return functions to match same month and month start end."""
# We have to recreate _local_from_timestamp in the closure in case the timezone changes
_local_from_timestamp = partial(
datetime.fromtimestamp, tz=dt_util.DEFAULT_TIME_ZONE
)
# We create _as_local_cached in the closure in case the timezone changes
_as_local_cached = lru_cache(maxsize=6)(dt_util.as_local)
_as_local_cached = lru_cache(maxsize=6)(_local_from_timestamp)
def _same_month(time1: datetime, time2: datetime) -> bool:
def _same_month_ts(time1: float, time2: float) -> bool:
"""Return True if time1 and time2 are in the same year and month."""
if 2 < time1.day < 26 and 2 < time2.day < 26:
# No need to convert to local time if both dates are far
# enough from possible start or end of the month as time zones
# can't change more than 24 hours in a month.
return (time1.year, time1.month) == (time2.year, time2.month)
date1 = _as_local_cached(time1)
date2 = _as_local_cached(time2)
return (date1.year, date1.month) == (date2.year, date2.month)
def _month_start_end(time: datetime) -> tuple[datetime, datetime]:
def _month_start_end_ts(time: float) -> tuple[float, float]:
"""Return the start and end of the period (month) time is within."""
start_local = _as_local_cached(time).replace(
day=1, hour=0, minute=0, second=0, microsecond=0
@ -1199,9 +1207,9 @@ def reduce_month_factory() -> (
start = dt_util.as_utc(start_local)
end_local = (start_local + timedelta(days=31)).replace(day=1)
end = dt_util.as_utc(end_local)
return (start, end)
return (start.timestamp(), end.timestamp())
return _same_month, _month_start_end
return _same_month_ts, _month_start_end_ts
def _reduce_statistics_per_month(
@ -1209,9 +1217,9 @@ def _reduce_statistics_per_month(
types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> dict[str, list[dict[str, Any]]]:
"""Reduce hourly statistics to monthly statistics."""
_same_month, _month_start_end = reduce_month_factory()
_same_month_ts, _month_start_end_ts = reduce_month_ts_factory()
return _reduce_statistics(
stats, _same_month, _month_start_end, timedelta(days=31), types
stats, _same_month_ts, _month_start_end_ts, timedelta(days=31), types
)
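
All three factories above share one pattern: bind datetime.fromtimestamp to the current local zone with functools.partial inside the closure, then wrap it in a small lru_cache. Because the partial and its cache are recreated on every factory call, a runtime timezone change simply yields fresh helpers the next time statistics are reduced, with no stale cached conversions. A standalone sketch of the pattern (dt_util.DEFAULT_TIME_ZONE is Home Assistant's mutable local zone; UTC stands in for it here):

    from datetime import datetime, timezone
    from functools import lru_cache, partial

    DEFAULT_TIME_ZONE = timezone.utc  # stand-in for dt_util.DEFAULT_TIME_ZONE

    def make_same_day():
        # Bind the zone at factory-call time, not at import time.
        _local_from_timestamp = partial(datetime.fromtimestamp, tz=DEFAULT_TIME_ZONE)
        _cached = lru_cache(maxsize=6)(_local_from_timestamp)

        def same_day(ts1: float, ts2: float) -> bool:
            # Compare the local calendar dates of two epoch timestamps.
            return _cached(ts1).date() == _cached(ts2).date()

        return same_day

    same_day = make_same_day()
    assert same_day(0.0, 3600.0)  # both fall on 1970-01-01 in UTC
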
@ -2028,7 +2036,6 @@ def _sorted_statistics_to_dict(
# Append all statistic entries, and optionally do unit conversion
table_duration_seconds = table.duration.total_seconds()
timestamp_to_datetime = dt_util.utc_from_timestamp
for meta_id, stats_list in stats_by_meta_id.items():
metadata_by_id = metadata[meta_id]
statistic_id = metadata_by_id["statistic_id"]
@ -2043,8 +2050,8 @@ def _sorted_statistics_to_dict(
for db_state in stats_list:
start_ts = db_state.start_ts
row: dict[str, Any] = {
"start": timestamp_to_datetime(start_ts),
"end": timestamp_to_datetime(start_ts + table_duration_seconds),
"start": start_ts,
"end": start_ts + table_duration_seconds,
}
if "mean" in types:
row["mean"] = convert(db_state.mean) if convert else db_state.mean
@ -2053,9 +2060,7 @@ def _sorted_statistics_to_dict(
if "max" in types:
row["max"] = convert(db_state.max) if convert else db_state.max
if "last_reset" in types:
row["last_reset"] = timestamp_to_datetime_or_none(
db_state.last_reset_ts
)
row["last_reset"] = db_state.last_reset_ts
if "state" in types:
row["state"] = convert(db_state.state) if convert else db_state.state
if "sum" in types:
@ -2509,8 +2514,10 @@ def _validate_db_schema(
schema_errors,
stored_statistic[0],
{
"last_reset": statistics["last_reset"],
"start": statistics["start"],
"last_reset": datetime_to_timestamp_or_none(
statistics["last_reset"]
),
"start": datetime_to_timestamp_or_none(statistics["start"]),
},
("start", "last_reset"),
table.__tablename__,

View File

@ -169,11 +169,11 @@ def _ws_get_statistics_during_period(
for statistic_id in result:
for item in result[statistic_id]:
if (start := item.get("start")) is not None:
item["start"] = int(start.timestamp() * 1000)
item["start"] = int(start * 1000)
if (end := item.get("end")) is not None:
item["end"] = int(end.timestamp() * 1000)
item["end"] = int(end * 1000)
if (last_reset := item.get("last_reset")) is not None:
item["last_reset"] = int(last_reset.timestamp() * 1000)
item["last_reset"] = int(last_reset * 1000)
return JSON_DUMP(messages.result_message(msg_id, result))
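
With rows already carrying epoch seconds, the websocket serializer above drops the datetime round-trip and scales straight to the millisecond integers the frontend expects. Illustrative only:

    start = 1675987200.0          # epoch seconds straight from the recorder
    start_ms = int(start * 1000)  # 1675987200000; was int(dt.timestamp() * 1000)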

View File

@ -369,6 +369,13 @@ def _last_reset_as_utc_isoformat(last_reset_s: Any, entity_id: str) -> str | Non
return dt_util.as_utc(last_reset).isoformat()
def _timestamp_to_isoformat_or_none(timestamp: float | None) -> str | None:
"""Convert a timestamp to ISO format or return None."""
if timestamp is None:
return None
return dt_util.utc_from_timestamp(timestamp).isoformat()
def compile_statistics(
hass: HomeAssistant, start: datetime.datetime, end: datetime.datetime
) -> statistics.PlatformCompiledStatistics:
@ -526,9 +533,9 @@ def _compile_statistics( # noqa: C901
if entity_id in last_stats:
# We have compiled history for this sensor before,
# use that as a starting point.
last_reset = old_last_reset = last_stats[entity_id][0]["last_reset"]
if old_last_reset is not None:
last_reset = old_last_reset = old_last_reset.isoformat()
last_reset = old_last_reset = _timestamp_to_isoformat_or_none(
last_stats[entity_id][0]["last_reset"]
)
new_state = old_state = last_stats[entity_id][0]["state"]
_sum = last_stats[entity_id][0]["sum"] or 0.0
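
The sensor platform gains a small None-safe converter because last_reset now arrives from the recorder as a float (or None) rather than a datetime. A usage sketch, assuming dt_util.utc_from_timestamp behaves like datetime.fromtimestamp(..., tz=timezone.utc):

    assert _timestamp_to_isoformat_or_none(None) is None
    assert _timestamp_to_isoformat_or_none(0.0) == "1970-01-01T00:00:00+00:00"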

View File

@ -14,7 +14,10 @@ from homeassistant.components import recorder
from homeassistant.components.recorder import history, statistics
from homeassistant.components.recorder.const import SQLITE_URL_PREFIX
from homeassistant.components.recorder.db_schema import StatisticsShortTerm
from homeassistant.components.recorder.models import process_timestamp
from homeassistant.components.recorder.models import (
datetime_to_timestamp_or_none,
process_timestamp,
)
from homeassistant.components.recorder.statistics import (
STATISTIC_UNIT_TO_UNIT_CONVERTER,
_statistics_during_period_with_session,
@ -91,8 +94,8 @@ def test_compile_hourly_statistics(hass_recorder):
do_adhoc_statistics(hass, start=four)
wait_recording_done(hass)
expected_1 = {
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(14.915254237288135),
"min": pytest.approx(10.0),
"max": pytest.approx(20.0),
@ -101,8 +104,8 @@ def test_compile_hourly_statistics(hass_recorder):
"sum": None,
}
expected_2 = {
"start": process_timestamp(four),
"end": process_timestamp(four + timedelta(minutes=5)),
"start": process_timestamp(four).timestamp(),
"end": process_timestamp(four + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(20.0),
"min": pytest.approx(20.0),
"max": pytest.approx(20.0),
@ -275,8 +278,8 @@ def test_compile_periodic_statistics_exception(
do_adhoc_statistics(hass, start=now + timedelta(minutes=5))
wait_recording_done(hass)
expected_1 = {
"start": process_timestamp(now),
"end": process_timestamp(now + timedelta(minutes=5)),
"start": process_timestamp(now).timestamp(),
"end": process_timestamp(now + timedelta(minutes=5)).timestamp(),
"mean": None,
"min": None,
"max": None,
@ -285,8 +288,8 @@ def test_compile_periodic_statistics_exception(
"sum": None,
}
expected_2 = {
"start": process_timestamp(now + timedelta(minutes=5)),
"end": process_timestamp(now + timedelta(minutes=10)),
"start": process_timestamp(now + timedelta(minutes=5)).timestamp(),
"end": process_timestamp(now + timedelta(minutes=10)).timestamp(),
"mean": None,
"min": None,
"max": None,
@ -345,8 +348,8 @@ def test_rename_entity(hass_recorder):
do_adhoc_statistics(hass, start=zero)
wait_recording_done(hass)
expected_1 = {
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(14.915254237288135),
"min": pytest.approx(10.0),
"max": pytest.approx(20.0),
@ -411,8 +414,8 @@ def test_rename_entity_collision(hass_recorder, caplog):
do_adhoc_statistics(hass, start=zero)
wait_recording_done(hass)
expected_1 = {
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(14.915254237288135),
"min": pytest.approx(10.0),
"max": pytest.approx(20.0),
@ -544,22 +547,22 @@ async def test_import_statistics(
assert stats == {
statistic_id: [
{
"start": process_timestamp(period1),
"end": process_timestamp(period1 + timedelta(hours=1)),
"start": process_timestamp(period1).timestamp(),
"end": process_timestamp(period1 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": last_reset_utc,
"last_reset": datetime_to_timestamp_or_none(last_reset_utc),
"state": pytest.approx(0.0),
"sum": pytest.approx(2.0),
},
{
"start": process_timestamp(period2),
"end": process_timestamp(period2 + timedelta(hours=1)),
"start": process_timestamp(period2).timestamp(),
"end": process_timestamp(period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": last_reset_utc,
"last_reset": datetime_to_timestamp_or_none(last_reset_utc),
"state": pytest.approx(1.0),
"sum": pytest.approx(3.0),
},
@ -602,12 +605,12 @@ async def test_import_statistics(
assert last_stats == {
statistic_id: [
{
"start": process_timestamp(period2),
"end": process_timestamp(period2 + timedelta(hours=1)),
"start": process_timestamp(period2).timestamp(),
"end": process_timestamp(period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": last_reset_utc,
"last_reset": datetime_to_timestamp_or_none(last_reset_utc),
"state": pytest.approx(1.0),
"sum": pytest.approx(3.0),
},
@ -627,8 +630,8 @@ async def test_import_statistics(
assert stats == {
statistic_id: [
{
"start": process_timestamp(period1),
"end": process_timestamp(period1 + timedelta(hours=1)),
"start": process_timestamp(period1).timestamp(),
"end": process_timestamp(period1 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -637,12 +640,12 @@ async def test_import_statistics(
"sum": pytest.approx(6.0),
},
{
"start": process_timestamp(period2),
"end": process_timestamp(period2 + timedelta(hours=1)),
"start": process_timestamp(period2).timestamp(),
"end": process_timestamp(period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": last_reset_utc,
"last_reset": datetime_to_timestamp_or_none(last_reset_utc),
"state": pytest.approx(1.0),
"sum": pytest.approx(3.0),
},
@ -693,22 +696,22 @@ async def test_import_statistics(
assert stats == {
statistic_id: [
{
"start": process_timestamp(period1),
"end": process_timestamp(period1 + timedelta(hours=1)),
"start": process_timestamp(period1).timestamp(),
"end": process_timestamp(period1 + timedelta(hours=1)).timestamp(),
"max": pytest.approx(1.0),
"mean": pytest.approx(2.0),
"min": pytest.approx(3.0),
"last_reset": last_reset_utc,
"last_reset": datetime_to_timestamp_or_none(last_reset_utc),
"state": pytest.approx(4.0),
"sum": pytest.approx(5.0),
},
{
"start": process_timestamp(period2),
"end": process_timestamp(period2 + timedelta(hours=1)),
"start": process_timestamp(period2).timestamp(),
"end": process_timestamp(period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": last_reset_utc,
"last_reset": datetime_to_timestamp_or_none(last_reset_utc),
"state": pytest.approx(1.0),
"sum": pytest.approx(3.0),
},
@ -734,22 +737,22 @@ async def test_import_statistics(
assert stats == {
statistic_id: [
{
"start": process_timestamp(period1),
"end": process_timestamp(period1 + timedelta(hours=1)),
"start": process_timestamp(period1).timestamp(),
"end": process_timestamp(period1 + timedelta(hours=1)).timestamp(),
"max": pytest.approx(1.0),
"mean": pytest.approx(2.0),
"min": pytest.approx(3.0),
"last_reset": last_reset_utc,
"last_reset": datetime_to_timestamp_or_none(last_reset_utc),
"state": pytest.approx(4.0),
"sum": pytest.approx(5.0),
},
{
"start": process_timestamp(period2),
"end": process_timestamp(period2 + timedelta(hours=1)),
"start": process_timestamp(period2).timestamp(),
"end": process_timestamp(period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": last_reset_utc,
"last_reset": datetime_to_timestamp_or_none(last_reset_utc),
"state": pytest.approx(1.0),
"sum": pytest.approx(1000 * 1000 + 3.0),
},
@ -993,8 +996,8 @@ def test_weekly_statistics(hass_recorder, caplog, timezone):
assert stats == {
"test:total_energy_import": [
{
"start": week1_start,
"end": week1_end,
"start": week1_start.timestamp(),
"end": week1_end.timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1003,8 +1006,8 @@ def test_weekly_statistics(hass_recorder, caplog, timezone):
"sum": 3.0,
},
{
"start": week2_start,
"end": week2_end,
"start": week2_start.timestamp(),
"end": week2_end.timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1024,8 +1027,8 @@ def test_weekly_statistics(hass_recorder, caplog, timezone):
assert stats == {
"test:total_energy_import": [
{
"start": week1_start,
"end": week1_end,
"start": week1_start.timestamp(),
"end": week1_end.timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1034,8 +1037,8 @@ def test_weekly_statistics(hass_recorder, caplog, timezone):
"sum": 3.0,
},
{
"start": week2_start,
"end": week2_end,
"start": week2_start.timestamp(),
"end": week2_end.timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1127,8 +1130,8 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
assert stats == {
"test:total_energy_import": [
{
"start": sep_start,
"end": sep_end,
"start": sep_start.timestamp(),
"end": sep_end.timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1137,8 +1140,8 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
"sum": pytest.approx(3.0),
},
{
"start": oct_start,
"end": oct_end,
"start": oct_start.timestamp(),
"end": oct_end.timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1162,8 +1165,8 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
assert stats == {
"test:total_energy_import": [
{
"start": sep_start,
"end": sep_end,
"start": sep_start.timestamp(),
"end": sep_end.timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1172,8 +1175,8 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
"sum": pytest.approx(3.0),
},
{
"start": oct_start,
"end": oct_end,
"start": oct_start.timestamp(),
"end": oct_end.timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1570,11 +1573,11 @@ async def test_validate_db_schema_fix_float_issue(
statistics = {
"recorder.db_test": [
{
"last_reset": precise_time,
"last_reset": precise_time.timestamp(),
"max": precise_number,
"mean": precise_number,
"min": precise_number,
"start": precise_time,
"start": precise_time.timestamp(),
"state": precise_number,
"sum": precise_number,
}

View File

@ -2446,8 +2446,8 @@ async def test_import_statistics(
assert stats == {
statistic_id: [
{
"start": period1,
"end": (period1 + timedelta(hours=1)),
"start": period1.timestamp(),
"end": (period1 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -2456,8 +2456,8 @@ async def test_import_statistics(
"sum": pytest.approx(2.0),
},
{
"start": period2,
"end": period2 + timedelta(hours=1),
"start": period2.timestamp(),
"end": (period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -2504,8 +2504,8 @@ async def test_import_statistics(
assert last_stats == {
statistic_id: [
{
"start": period2,
"end": period2 + timedelta(hours=1),
"start": period2.timestamp(),
"end": (period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -2541,8 +2541,8 @@ async def test_import_statistics(
assert stats == {
statistic_id: [
{
"start": period1,
"end": period1 + timedelta(hours=1),
"start": period1.timestamp(),
"end": (period1 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -2551,8 +2551,8 @@ async def test_import_statistics(
"sum": pytest.approx(6.0),
},
{
"start": period2,
"end": period2 + timedelta(hours=1),
"start": period2.timestamp(),
"end": (period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -2591,8 +2591,8 @@ async def test_import_statistics(
assert stats == {
statistic_id: [
{
"start": period1,
"end": period1 + timedelta(hours=1),
"start": period1.timestamp(),
"end": (period1 + timedelta(hours=1)).timestamp(),
"max": pytest.approx(1.0),
"mean": pytest.approx(2.0),
"min": pytest.approx(3.0),
@ -2601,8 +2601,8 @@ async def test_import_statistics(
"sum": pytest.approx(5.0),
},
{
"start": period2,
"end": period2 + timedelta(hours=1),
"start": period2.timestamp(),
"end": (period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -2673,8 +2673,8 @@ async def test_adjust_sum_statistics_energy(
assert stats == {
statistic_id: [
{
"start": period1,
"end": period1 + timedelta(hours=1),
"start": period1.timestamp(),
"end": (period1 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -2683,8 +2683,8 @@ async def test_adjust_sum_statistics_energy(
"sum": pytest.approx(2.0),
},
{
"start": period2,
"end": period2 + timedelta(hours=1),
"start": period2.timestamp(),
"end": (period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -2741,8 +2741,8 @@ async def test_adjust_sum_statistics_energy(
assert stats == {
statistic_id: [
{
"start": period1,
"end": period1 + timedelta(hours=1),
"start": period1.timestamp(),
"end": (period1 + timedelta(hours=1)).timestamp(),
"max": pytest.approx(None),
"mean": pytest.approx(None),
"min": pytest.approx(None),
@ -2751,8 +2751,8 @@ async def test_adjust_sum_statistics_energy(
"sum": pytest.approx(2.0),
},
{
"start": period2,
"end": period2 + timedelta(hours=1),
"start": period2.timestamp(),
"end": (period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -2782,8 +2782,8 @@ async def test_adjust_sum_statistics_energy(
assert stats == {
statistic_id: [
{
"start": period1,
"end": period1 + timedelta(hours=1),
"start": period1.timestamp(),
"end": (period1 + timedelta(hours=1)).timestamp(),
"max": pytest.approx(None),
"mean": pytest.approx(None),
"min": pytest.approx(None),
@ -2792,8 +2792,8 @@ async def test_adjust_sum_statistics_energy(
"sum": pytest.approx(2.0),
},
{
"start": period2,
"end": period2 + timedelta(hours=1),
"start": period2.timestamp(),
"end": (period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -2864,8 +2864,8 @@ async def test_adjust_sum_statistics_gas(
assert stats == {
statistic_id: [
{
"start": period1,
"end": period1 + timedelta(hours=1),
"start": period1.timestamp(),
"end": (period1 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -2874,8 +2874,8 @@ async def test_adjust_sum_statistics_gas(
"sum": pytest.approx(2.0),
},
{
"start": period2,
"end": period2 + timedelta(hours=1),
"start": period2.timestamp(),
"end": (period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -2932,8 +2932,8 @@ async def test_adjust_sum_statistics_gas(
assert stats == {
statistic_id: [
{
"start": period1,
"end": period1 + timedelta(hours=1),
"start": period1.timestamp(),
"end": (period1 + timedelta(hours=1)).timestamp(),
"max": pytest.approx(None),
"mean": pytest.approx(None),
"min": pytest.approx(None),
@ -2942,8 +2942,8 @@ async def test_adjust_sum_statistics_gas(
"sum": pytest.approx(2.0),
},
{
"start": period2,
"end": period2 + timedelta(hours=1),
"start": period2.timestamp(),
"end": (period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -2973,8 +2973,8 @@ async def test_adjust_sum_statistics_gas(
assert stats == {
statistic_id: [
{
"start": period1,
"end": period1 + timedelta(hours=1),
"start": period1.timestamp(),
"end": (period1 + timedelta(hours=1)).timestamp(),
"max": pytest.approx(None),
"mean": pytest.approx(None),
"min": pytest.approx(None),
@ -2983,8 +2983,8 @@ async def test_adjust_sum_statistics_gas(
"sum": pytest.approx(2.0),
},
{
"start": period2,
"end": period2 + timedelta(hours=1),
"start": period2.timestamp(),
"end": (period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -3070,8 +3070,8 @@ async def test_adjust_sum_statistics_errors(
assert stats == {
statistic_id: [
{
"start": period1,
"end": period1 + timedelta(hours=1),
"start": period1.timestamp(),
"end": (period1 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -3080,8 +3080,8 @@ async def test_adjust_sum_statistics_errors(
"sum": pytest.approx(2.0 * factor),
},
{
"start": period2,
"end": period2 + timedelta(hours=1),
"start": period2.timestamp(),
"end": (period2 + timedelta(hours=1)).timestamp(),
"max": None,
"mean": None,
"min": None,

View File

@ -162,8 +162,8 @@ def test_compile_hourly_statistics(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -236,8 +236,8 @@ def test_compile_hourly_statistics_purged_state_changes(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -345,8 +345,8 @@ def test_compile_hourly_statistics_wrong_unit(hass_recorder, caplog, attributes)
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(13.050847),
"min": pytest.approx(-10.0),
"max": pytest.approx(30.0),
@ -357,8 +357,8 @@ def test_compile_hourly_statistics_wrong_unit(hass_recorder, caplog, attributes)
],
"sensor.test2": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": 13.05084745762712,
"min": -10.0,
"max": 30.0,
@ -369,8 +369,8 @@ def test_compile_hourly_statistics_wrong_unit(hass_recorder, caplog, attributes)
],
"sensor.test3": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": 13.05084745762712,
"min": -10.0,
"max": 30.0,
@ -381,8 +381,8 @@ def test_compile_hourly_statistics_wrong_unit(hass_recorder, caplog, attributes)
],
"sensor.test6": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(13.050847),
"min": pytest.approx(-10.0),
"max": pytest.approx(30.0),
@ -393,8 +393,8 @@ def test_compile_hourly_statistics_wrong_unit(hass_recorder, caplog, attributes)
],
"sensor.test7": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(13.050847),
"min": pytest.approx(-10.0),
"max": pytest.approx(30.0),
@ -503,32 +503,32 @@ async def test_compile_hourly_sum_statistics_amount(
expected_stats = {
"sensor.test1": [
{
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"start": process_timestamp(period0).timestamp(),
"end": process_timestamp(period0_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(period0),
"last_reset": process_timestamp(period0).timestamp(),
"state": pytest.approx(factor * seq[2]),
"sum": pytest.approx(factor * 10.0),
},
{
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"start": process_timestamp(period1).timestamp(),
"end": process_timestamp(period1_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"last_reset": process_timestamp(four).timestamp(),
"state": pytest.approx(factor * seq[5]),
"sum": pytest.approx(factor * 40.0),
},
{
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"start": process_timestamp(period2).timestamp(),
"end": process_timestamp(period2_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"last_reset": process_timestamp(four).timestamp(),
"state": pytest.approx(factor * seq[8]),
"sum": pytest.approx(factor * 70.0),
},
@ -695,22 +695,22 @@ def test_compile_hourly_sum_statistics_amount_reset_every_state_change(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(dt_util.as_local(one)),
"last_reset": process_timestamp(dt_util.as_local(one)).timestamp(),
"state": pytest.approx(factor * seq[7]),
"sum": pytest.approx(factor * (sum(seq) - seq[0])),
},
{
"start": process_timestamp(zero + timedelta(minutes=5)),
"end": process_timestamp(zero + timedelta(minutes=10)),
"start": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=10)).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(dt_util.as_local(two)),
"last_reset": process_timestamp(dt_util.as_local(two)).timestamp(),
"state": pytest.approx(factor * seq[7]),
"sum": pytest.approx(factor * (2 * sum(seq) - seq[0])),
},
@ -794,12 +794,12 @@ def test_compile_hourly_sum_statistics_amount_invalid_last_reset(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(dt_util.as_local(one)),
"last_reset": process_timestamp(dt_util.as_local(one)).timestamp(),
"state": pytest.approx(factor * seq[7]),
"sum": pytest.approx(factor * (sum(seq) - seq[0] - seq[3])),
},
@ -880,12 +880,12 @@ def test_compile_hourly_sum_statistics_nan_inf_state(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(one),
"last_reset": process_timestamp(one).timestamp(),
"state": pytest.approx(factor * seq[7]),
"sum": pytest.approx(
factor * (seq[2] + seq[3] + seq[4] + seq[6] + seq[7])
@ -1008,8 +1008,8 @@ def test_compile_hourly_sum_statistics_negative_state(
stats = statistics_during_period(hass, zero, period="5minute")
assert stats[entity_id] == [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1099,8 +1099,8 @@ def test_compile_hourly_sum_statistics_total_no_reset(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"start": process_timestamp(period0).timestamp(),
"end": process_timestamp(period0_end).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1109,8 +1109,8 @@ def test_compile_hourly_sum_statistics_total_no_reset(
"sum": pytest.approx(factor * 10.0),
},
{
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"start": process_timestamp(period1).timestamp(),
"end": process_timestamp(period1_end).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1119,8 +1119,8 @@ def test_compile_hourly_sum_statistics_total_no_reset(
"sum": pytest.approx(factor * 30.0),
},
{
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"start": process_timestamp(period2).timestamp(),
"end": process_timestamp(period2_end).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1201,8 +1201,8 @@ def test_compile_hourly_sum_statistics_total_increasing(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"start": process_timestamp(period0).timestamp(),
"end": process_timestamp(period0_end).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1211,8 +1211,8 @@ def test_compile_hourly_sum_statistics_total_increasing(
"sum": pytest.approx(factor * 10.0),
},
{
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"start": process_timestamp(period1).timestamp(),
"end": process_timestamp(period1_end).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1221,8 +1221,8 @@ def test_compile_hourly_sum_statistics_total_increasing(
"sum": pytest.approx(factor * 50.0),
},
{
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"start": process_timestamp(period2).timestamp(),
"end": process_timestamp(period2_end).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1315,8 +1315,8 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip(
"sensor.test1": [
{
"last_reset": None,
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"start": process_timestamp(period0).timestamp(),
"end": process_timestamp(period0_end).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1325,8 +1325,8 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip(
},
{
"last_reset": None,
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"start": process_timestamp(period1).timestamp(),
"end": process_timestamp(period1_end).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1335,8 +1335,8 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip(
},
{
"last_reset": None,
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"start": process_timestamp(period2).timestamp(),
"end": process_timestamp(period2_end).timestamp(),
"max": None,
"mean": None,
"min": None,
@ -1408,32 +1408,32 @@ def test_compile_hourly_energy_statistics_unsupported(hass_recorder, caplog):
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"start": process_timestamp(period0).timestamp(),
"end": process_timestamp(period0_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(period0),
"last_reset": process_timestamp(period0).timestamp(),
"state": pytest.approx(20.0),
"sum": pytest.approx(10.0),
},
{
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"start": process_timestamp(period1).timestamp(),
"end": process_timestamp(period1_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"last_reset": process_timestamp(four).timestamp(),
"state": pytest.approx(40.0),
"sum": pytest.approx(40.0),
},
{
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"start": process_timestamp(period2).timestamp(),
"end": process_timestamp(period2_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"last_reset": process_timestamp(four).timestamp(),
"state": pytest.approx(70.0),
"sum": pytest.approx(70.0),
},
@ -1520,96 +1520,96 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"start": process_timestamp(period0).timestamp(),
"end": process_timestamp(period0_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(period0),
"last_reset": process_timestamp(period0).timestamp(),
"state": pytest.approx(20.0),
"sum": pytest.approx(10.0),
},
{
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"start": process_timestamp(period1).timestamp(),
"end": process_timestamp(period1_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"last_reset": process_timestamp(four).timestamp(),
"state": pytest.approx(40.0),
"sum": pytest.approx(40.0),
},
{
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"start": process_timestamp(period2).timestamp(),
"end": process_timestamp(period2_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"last_reset": process_timestamp(four).timestamp(),
"state": pytest.approx(70.0),
"sum": pytest.approx(70.0),
},
],
"sensor.test2": [
{
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"start": process_timestamp(period0).timestamp(),
"end": process_timestamp(period0_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(period0),
"last_reset": process_timestamp(period0).timestamp(),
"state": pytest.approx(130.0),
"sum": pytest.approx(20.0),
},
{
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"start": process_timestamp(period1).timestamp(),
"end": process_timestamp(period1_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"last_reset": process_timestamp(four).timestamp(),
"state": pytest.approx(45.0),
"sum": pytest.approx(-65.0),
},
{
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"start": process_timestamp(period2).timestamp(),
"end": process_timestamp(period2_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"last_reset": process_timestamp(four).timestamp(),
"state": pytest.approx(75.0),
"sum": pytest.approx(-35.0),
},
],
"sensor.test3": [
{
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"start": process_timestamp(period0).timestamp(),
"end": process_timestamp(period0_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(period0),
"last_reset": process_timestamp(period0).timestamp(),
"state": pytest.approx(5.0),
"sum": pytest.approx(5.0),
},
{
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"start": process_timestamp(period1).timestamp(),
"end": process_timestamp(period1_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"last_reset": process_timestamp(four).timestamp(),
"state": pytest.approx(50.0),
"sum": pytest.approx(60.0),
},
{
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"start": process_timestamp(period2).timestamp(),
"end": process_timestamp(period2_end).timestamp(),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"last_reset": process_timestamp(four).timestamp(),
"state": pytest.approx(90.0),
"sum": pytest.approx(100.0),
},
@ -1665,8 +1665,8 @@ def test_compile_hourly_statistics_unchanged(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(four),
"end": process_timestamp(four + timedelta(minutes=5)),
"start": process_timestamp(four).timestamp(),
"end": process_timestamp(four + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(value),
"min": pytest.approx(value),
"max": pytest.approx(value),
@ -1697,8 +1697,8 @@ def test_compile_hourly_statistics_partially_unavailable(hass_recorder, caplog):
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(21.1864406779661),
"min": pytest.approx(10.0),
"max": pytest.approx(25.0),
@ -1766,8 +1766,8 @@ def test_compile_hourly_statistics_unavailable(
assert stats == {
"sensor.test2": [
{
"start": process_timestamp(four),
"end": process_timestamp(four + timedelta(minutes=5)),
"start": process_timestamp(four).timestamp(),
"end": process_timestamp(four + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(value),
"min": pytest.approx(value),
"max": pytest.approx(value),
@ -1988,8 +1988,8 @@ def test_compile_hourly_statistics_changing_units_1(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2023,8 +2023,8 @@ def test_compile_hourly_statistics_changing_units_1(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2166,8 +2166,8 @@ def test_compile_hourly_statistics_changing_units_3(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2201,8 +2201,8 @@ def test_compile_hourly_statistics_changing_units_3(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2274,8 +2274,8 @@ def test_compile_hourly_statistics_convert_units_1(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2317,8 +2317,8 @@ def test_compile_hourly_statistics_convert_units_1(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean * factor),
"min": pytest.approx(min * factor),
"max": pytest.approx(max * factor),
@ -2327,8 +2327,8 @@ def test_compile_hourly_statistics_convert_units_1(
"sum": None,
},
{
"start": process_timestamp(zero + timedelta(minutes=10)),
"end": process_timestamp(zero + timedelta(minutes=15)),
"start": process_timestamp(zero + timedelta(minutes=10)).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=15)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2406,8 +2406,8 @@ def test_compile_hourly_statistics_equivalent_units_1(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2437,8 +2437,8 @@ def test_compile_hourly_statistics_equivalent_units_1(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2447,8 +2447,8 @@ def test_compile_hourly_statistics_equivalent_units_1(
"sum": None,
},
{
"start": process_timestamp(zero + timedelta(minutes=10)),
"end": process_timestamp(zero + timedelta(minutes=15)),
"start": process_timestamp(zero + timedelta(minutes=10)).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=15)).timestamp(),
"mean": pytest.approx(mean2),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2521,8 +2521,10 @@ def test_compile_hourly_statistics_equivalent_units_2(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero + timedelta(seconds=30 * 5)),
"end": process_timestamp(zero + timedelta(seconds=30 * 15)),
"start": process_timestamp(
zero + timedelta(seconds=30 * 5)
).timestamp(),
"end": process_timestamp(zero + timedelta(seconds=30 * 15)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2590,8 +2592,8 @@ def test_compile_hourly_statistics_changing_device_class_1(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean1),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2635,8 +2637,8 @@ def test_compile_hourly_statistics_changing_device_class_1(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean1),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2645,8 +2647,8 @@ def test_compile_hourly_statistics_changing_device_class_1(
"sum": None,
},
{
"start": process_timestamp(zero + timedelta(minutes=10)),
"end": process_timestamp(zero + timedelta(minutes=15)),
"start": process_timestamp(zero + timedelta(minutes=10)).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=15)).timestamp(),
"mean": pytest.approx(mean2),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2690,8 +2692,8 @@ def test_compile_hourly_statistics_changing_device_class_1(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean1),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2700,8 +2702,8 @@ def test_compile_hourly_statistics_changing_device_class_1(
"sum": None,
},
{
"start": process_timestamp(zero + timedelta(minutes=10)),
"end": process_timestamp(zero + timedelta(minutes=15)),
"start": process_timestamp(zero + timedelta(minutes=10)).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=15)).timestamp(),
"mean": pytest.approx(mean2),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2710,8 +2712,8 @@ def test_compile_hourly_statistics_changing_device_class_1(
"sum": None,
},
{
"start": process_timestamp(zero + timedelta(minutes=20)),
"end": process_timestamp(zero + timedelta(minutes=25)),
"start": process_timestamp(zero + timedelta(minutes=20)).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=25)).timestamp(),
"mean": pytest.approx(mean2),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2780,8 +2782,8 @@ def test_compile_hourly_statistics_changing_device_class_2(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2825,8 +2827,8 @@ def test_compile_hourly_statistics_changing_device_class_2(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"start": process_timestamp(zero).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2835,8 +2837,8 @@ def test_compile_hourly_statistics_changing_device_class_2(
"sum": None,
},
{
"start": process_timestamp(zero + timedelta(minutes=10)),
"end": process_timestamp(zero + timedelta(minutes=15)),
"start": process_timestamp(zero + timedelta(minutes=10)).timestamp(),
"end": process_timestamp(zero + timedelta(minutes=15)).timestamp(),
"mean": pytest.approx(mean2),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2954,8 +2956,8 @@ def test_compile_hourly_statistics_changing_state_class(
assert stats == {
"sensor.test1": [
{
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"start": process_timestamp(period0).timestamp(),
"end": process_timestamp(period0_end).timestamp(),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
@ -2964,8 +2966,8 @@ def test_compile_hourly_statistics_changing_state_class(
"sum": None,
},
{
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"start": process_timestamp(period1).timestamp(),
"end": process_timestamp(period1_end).timestamp(),
"mean": None,
"min": None,
"max": None,
@ -3204,8 +3206,8 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
)
expected_stats[entity_id].append(
{
"start": process_timestamp(start),
"end": process_timestamp(end),
"start": process_timestamp(start).timestamp(),
"end": process_timestamp(end).timestamp(),
"mean": pytest.approx(expected_average),
"min": pytest.approx(expected_minimum),
"max": pytest.approx(expected_maximum),
@ -3261,8 +3263,8 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
)
expected_stats[entity_id].append(
{
"start": process_timestamp(start),
"end": process_timestamp(end),
"start": process_timestamp(start).timestamp(),
"end": process_timestamp(end).timestamp(),
"mean": pytest.approx(expected_average),
"min": pytest.approx(expected_minimum),
"max": pytest.approx(expected_maximum),
@ -3318,8 +3320,8 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
)
expected_stats[entity_id].append(
{
"start": process_timestamp(start),
"end": process_timestamp(end),
"start": process_timestamp(start).timestamp(),
"end": process_timestamp(end).timestamp(),
"mean": pytest.approx(expected_average),
"min": pytest.approx(expected_minimum),
"max": pytest.approx(expected_maximum),
@ -3375,8 +3377,8 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
)
expected_stats[entity_id].append(
{
"start": process_timestamp(start),
"end": process_timestamp(end),
"start": process_timestamp(start).timestamp(),
"end": process_timestamp(end).timestamp(),
"mean": pytest.approx(expected_average),
"min": pytest.approx(expected_minimum),
"max": pytest.approx(expected_maximum),

View File

@ -43,7 +43,7 @@ async def test_async_setup_entry(recorder_mock, hass):
assert len(stats[statistic_id]) == 3
_sum = 0
for k, stat in enumerate(stats[statistic_id]):
assert stat["start"] == dt_util.parse_datetime(data[k]["from"])
assert stat["start"] == dt_util.parse_datetime(data[k]["from"]).timestamp()
assert stat["state"] == data[k][key]
assert stat["mean"] is None
assert stat["min"] is None