Mirror of https://github.com/home-assistant/core.git (synced 2025-07-23 05:07:41 +00:00)
Add support for calculating daily and monthly fossil energy consumption (#59588)
This commit is contained in:
parent 49a27e12ad
commit 4a5238efa5
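
The commit adds an energy/fossil_energy_consumption WebSocket command. A minimal sketch (not part of the commit) of how a client could call it, assuming a connected WebSocket test client such as the hass_ws_client fixture used in the tests below; the statistic ids are placeholders:

    # Sketch only: request daily fossil energy between two timestamps.
    await client.send_json(
        {
            "id": 1,
            "type": "energy/fossil_energy_consumption",
            "start_time": "2021-09-01T00:00:00+00:00",
            "end_time": "2021-10-01T00:00:00+00:00",
            "energy_statistic_ids": ["sensor.grid_import"],       # placeholder id
            "co2_statistic_id": "sensor.fossil_percentage",       # placeholder id
            "period": "day",
        }
    )
    response = await client.receive_json()
    # response["result"] maps each period start (ISO 8601) to a fossil energy delta in kWh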
@@ -2,18 +2,22 @@
 from __future__ import annotations
 
 import asyncio
+from collections import defaultdict
+from datetime import datetime, timedelta
 import functools
+from itertools import chain
 from types import ModuleType
 from typing import Any, Awaitable, Callable, cast
 
 import voluptuous as vol
 
-from homeassistant.components import websocket_api
+from homeassistant.components import recorder, websocket_api
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.integration_platform import (
     async_process_integration_platforms,
 )
 from homeassistant.helpers.singleton import singleton
+from homeassistant.util import dt as dt_util
 
 from .const import DOMAIN
 from .data import (
@@ -44,6 +48,7 @@ def async_setup(hass: HomeAssistant) -> None:
     websocket_api.async_register_command(hass, ws_info)
     websocket_api.async_register_command(hass, ws_validate)
     websocket_api.async_register_command(hass, ws_solar_forecast)
+    websocket_api.async_register_command(hass, ws_get_fossil_energy_consumption)
 
 
 @singleton("energy_platforms")
@@ -218,3 +223,143 @@ async def ws_solar_forecast(
         forecasts[config_entry_id] = forecast
 
     connection.send_result(msg["id"], forecasts)
+
+
+@websocket_api.websocket_command(
+    {
+        vol.Required("type"): "energy/fossil_energy_consumption",
+        vol.Required("start_time"): str,
+        vol.Required("end_time"): str,
+        vol.Required("energy_statistic_ids"): [str],
+        vol.Required("co2_statistic_id"): str,
+        vol.Required("period"): vol.Any("5minute", "hour", "day", "month"),
+    }
+)
+@websocket_api.async_response
+async def ws_get_fossil_energy_consumption(
+    hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
+) -> None:
+    """Calculate amount of fossil based energy."""
+    start_time_str = msg["start_time"]
+    end_time_str = msg["end_time"]
+
+    if start_time := dt_util.parse_datetime(start_time_str):
+        start_time = dt_util.as_utc(start_time)
+    else:
+        connection.send_error(msg["id"], "invalid_start_time", "Invalid start_time")
+        return
+
+    if end_time := dt_util.parse_datetime(end_time_str):
+        end_time = dt_util.as_utc(end_time)
+    else:
+        connection.send_error(msg["id"], "invalid_end_time", "Invalid end_time")
+        return
+
+    statistic_ids = list(msg["energy_statistic_ids"])
+    statistic_ids.append(msg["co2_statistic_id"])
+
+    # Fetch energy + CO2 statistics
+    statistics = await hass.async_add_executor_job(
+        recorder.statistics.statistics_during_period,
+        hass,
+        start_time,
+        end_time,
+        statistic_ids,
+        "hour",
+        True,
+    )
+
+    def _combine_sum_statistics(
+        stats: dict[str, list[dict[str, Any]]], statistic_ids: list[str]
+    ) -> dict[datetime, float]:
+        """Combine multiple statistics, returns a dict indexed by start time."""
+        result: defaultdict[datetime, float] = defaultdict(float)
+
+        for statistics_id, stat in stats.items():
+            if statistics_id not in statistic_ids:
+                continue
+            for period in stat:
+                if period["sum"] is None:
+                    continue
+                result[period["start"]] += period["sum"]
+
+        return {key: result[key] for key in sorted(result)}
+
+    def _calculate_deltas(sums: dict[datetime, float]) -> dict[datetime, float]:
+        prev: float | None = None
+        result: dict[datetime, float] = {}
+        for period, sum_ in sums.items():
+            if prev is not None:
+                result[period] = sum_ - prev
+            prev = sum_
+        return result
+
+    def _reduce_deltas(
+        stat_list: list[dict[str, Any]],
+        same_period: Callable[[datetime, datetime], bool],
+        period_start_end: Callable[[datetime], tuple[datetime, datetime]],
+        period: timedelta,
+    ) -> list[dict[str, Any]]:
+        """Reduce hourly deltas to daily or monthly deltas."""
+        result: list[dict[str, Any]] = []
+        deltas: list[float] = []
+        prev_stat: dict[str, Any] = stat_list[0]
+
+        # Loop over the hourly deltas + a fake entry to end the period
+        for statistic in chain(
+            stat_list, ({"start": stat_list[-1]["start"] + period},)
+        ):
+            if not same_period(prev_stat["start"], statistic["start"]):
+                start, _ = period_start_end(prev_stat["start"])
+                # The previous statistic was the last entry of the period
+                result.append(
+                    {
+                        "start": start.isoformat(),
+                        "delta": sum(deltas),
+                    }
+                )
+                deltas = []
+            if statistic.get("delta") is not None:
+                deltas.append(statistic["delta"])
+            prev_stat = statistic
+
+        return result
+
+    merged_energy_statistics = _combine_sum_statistics(
+        statistics, msg["energy_statistic_ids"]
+    )
+    energy_deltas = _calculate_deltas(merged_energy_statistics)
+    indexed_co2_statistics = {
+        period["start"]: period["mean"]
+        for period in statistics.get(msg["co2_statistic_id"], {})
+    }
+
+    # Calculate amount of fossil based energy, assume 100% fossil if missing
+    fossil_energy = [
+        {"start": start, "delta": delta * indexed_co2_statistics.get(start, 100) / 100}
+        for start, delta in energy_deltas.items()
+    ]
+
+    if msg["period"] == "hour":
+        reduced_fossil_energy = [
+            {"start": period["start"].isoformat(), "delta": period["delta"]}
+            for period in fossil_energy
+        ]
+
+    elif msg["period"] == "day":
+        reduced_fossil_energy = _reduce_deltas(
+            fossil_energy,
+            recorder.statistics.same_day,
+            recorder.statistics.day_start_end,
+            timedelta(days=1),
+        )
+    else:
+        reduced_fossil_energy = _reduce_deltas(
+            fossil_energy,
+            recorder.statistics.same_month,
+            recorder.statistics.month_start_end,
+            timedelta(days=1),
+        )
+
+    result = {period["start"]: period["delta"] for period in reduced_fossil_energy}
+    connection.send_result(msg["id"], result)
@@ -697,8 +697,8 @@ def _reduce_statistics(
                         "mean": mean(mean_values) if mean_values else None,
                         "min": min(min_values) if min_values else None,
                         "max": max(max_values) if max_values else None,
-                        "last_reset": prev_stat["last_reset"],
-                        "state": prev_stat["state"],
+                        "last_reset": prev_stat.get("last_reset"),
+                        "state": prev_stat.get("state"),
                         "sum": prev_stat["sum"],
                     }
                 )
@@ -716,50 +716,54 @@ def _reduce_statistics(
     return result
 
 
+def same_day(time1: datetime, time2: datetime) -> bool:
+    """Return True if time1 and time2 are in the same date."""
+    date1 = dt_util.as_local(time1).date()
+    date2 = dt_util.as_local(time2).date()
+    return date1 == date2
+
+
+def day_start_end(time: datetime) -> tuple[datetime, datetime]:
+    """Return the start and end of the period (day) time is within."""
+    start = dt_util.as_utc(
+        dt_util.as_local(time).replace(hour=0, minute=0, second=0, microsecond=0)
+    )
+    end = start + timedelta(days=1)
+    return (start, end)
+
+
 def _reduce_statistics_per_day(
     stats: dict[str, list[dict[str, Any]]]
 ) -> dict[str, list[dict[str, Any]]]:
     """Reduce hourly statistics to daily statistics."""
-
-    def same_period(time1: datetime, time2: datetime) -> bool:
-        """Return True if time1 and time2 are in the same date."""
-        date1 = dt_util.as_local(time1).date()
-        date2 = dt_util.as_local(time2).date()
-        return date1 == date2
-
-    def period_start_end(time: datetime) -> tuple[datetime, datetime]:
-        """Return the start and end of the period (day) time is within."""
-        start = dt_util.as_utc(
-            dt_util.as_local(time).replace(hour=0, minute=0, second=0, microsecond=0)
-        )
-        end = start + timedelta(days=1)
-        return (start, end)
-
-    return _reduce_statistics(stats, same_period, period_start_end, timedelta(days=1))
+    return _reduce_statistics(stats, same_day, day_start_end, timedelta(days=1))
+
+
+def same_month(time1: datetime, time2: datetime) -> bool:
+    """Return True if time1 and time2 are in the same year and month."""
+    date1 = dt_util.as_local(time1).date()
+    date2 = dt_util.as_local(time2).date()
+    return (date1.year, date1.month) == (date2.year, date2.month)
+
+
+def month_start_end(time: datetime) -> tuple[datetime, datetime]:
+    """Return the start and end of the period (month) time is within."""
+    start_local = dt_util.as_local(time).replace(
+        day=1, hour=0, minute=0, second=0, microsecond=0
+    )
+    start = dt_util.as_utc(start_local)
+    end_local = (start_local + timedelta(days=31)).replace(day=1)
+    end = dt_util.as_utc(end_local)
+    return (start, end)
 
 
 def _reduce_statistics_per_month(
-    stats: dict[str, list[dict[str, Any]]]
+    stats: dict[str, list[dict[str, Any]]],
 ) -> dict[str, list[dict[str, Any]]]:
     """Reduce hourly statistics to monthly statistics."""
-
-    def same_period(time1: datetime, time2: datetime) -> bool:
-        """Return True if time1 and time2 are in the same year and month."""
-        date1 = dt_util.as_local(time1).date()
-        date2 = dt_util.as_local(time2).date()
-        return (date1.year, date1.month) == (date2.year, date2.month)
-
-    def period_start_end(time: datetime) -> tuple[datetime, datetime]:
-        """Return the start and end of the period (month) time is within."""
-        start_local = dt_util.as_local(time).replace(
-            day=1, hour=0, minute=0, second=0, microsecond=0
-        )
-        start = dt_util.as_utc(start_local)
-        end_local = (start_local + timedelta(days=31)).replace(day=1)
-        end = dt_util.as_utc(end_local)
-        return (start, end)
-
-    return _reduce_statistics(stats, same_period, period_start_end, timedelta(days=31))
+    return _reduce_statistics(stats, same_month, month_start_end, timedelta(days=31))
 
 
 def statistics_during_period(
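
A quick illustrative check (not part of the commit) of the trick month_start_end uses: adding 31 days to the local month start always lands in the following month, and replace(day=1) snaps back to its first day.

    from datetime import datetime, timedelta

    # 2021-02-01 + 31 days = 2021-03-04; snapping to day 1 gives the next month's start.
    start_local = datetime(2021, 2, 1)
    next_month_start = (start_local + timedelta(days=31)).replace(day=1)
    assert next_month_start == datetime(2021, 3, 1)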
@@ -768,6 +772,7 @@ def statistics_during_period(
     end_time: datetime | None = None,
     statistic_ids: list[str] | None = None,
     period: Literal["5minute", "day", "hour", "month"] = "hour",
+    start_time_as_datetime: bool = False,
 ) -> dict[str, list[dict[str, Any]]]:
     """Return statistics during UTC period start_time - end_time for the statistic_ids.
 
@@ -808,7 +813,15 @@ def statistics_during_period(
     # Return statistics combined with metadata
     if period not in ("day", "month"):
        return _sorted_statistics_to_dict(
-            hass, session, stats, statistic_ids, metadata, True, table, start_time
+            hass,
+            session,
+            stats,
+            statistic_ids,
+            metadata,
+            True,
+            table,
+            start_time,
+            start_time_as_datetime,
        )
 
     result = _sorted_statistics_to_dict(
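
The new start_time_as_datetime flag appears to exist so the energy WebSocket handler can keep each returned row's "start" as a datetime object and line it up with its datetime-keyed energy deltas. A hedged sketch of such a call, using only the signature shown in the hunk above:

    # Sketch only: request hourly rows whose "start" values stay datetime objects.
    stats = statistics_during_period(
        hass,
        start_time,
        end_time,
        statistic_ids,
        "hour",
        start_time_as_datetime=True,
    )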
@@ -4,9 +4,17 @@ from unittest.mock import AsyncMock, Mock
 import pytest
 
 from homeassistant.components.energy import data, is_configured
+from homeassistant.components.recorder.statistics import async_add_external_statistics
 from homeassistant.setup import async_setup_component
+from homeassistant.util import dt as dt_util
 
-from tests.common import MockConfigEntry, flush_store, mock_platform
+from tests.common import (
+    MockConfigEntry,
+    flush_store,
+    init_recorder_component,
+    mock_platform,
+)
+from tests.components.recorder.common import async_wait_recording_done_without_instance
 
 
 @pytest.fixture(autouse=True)
@@ -289,3 +297,525 @@ async def test_get_solar_forecast(hass, hass_ws_client, mock_energy_platform) ->
             }
         }
     }
+
+
+@pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
+async def test_fossil_energy_consumption_no_co2(hass, hass_ws_client):
+    """Test fossil_energy_consumption when co2 data is missing."""
+    now = dt_util.utcnow()
+    later = dt_util.as_utc(dt_util.parse_datetime("2022-09-01 00:00:00"))
+
+    await hass.async_add_executor_job(init_recorder_component, hass)
+    await async_setup_component(hass, "history", {})
+    await async_setup_component(hass, "sensor", {})
+
+    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
+    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
+    period2_day_start = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 00:00:00"))
+    period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
+    period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))
+    period4_day_start = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 00:00:00"))
+
+    external_energy_statistics_1 = (
+        {
+            "start": period1,
+            "last_reset": None,
+            "state": 0,
+            "sum": 2,
+        },
+        {
+            "start": period2,
+            "last_reset": None,
+            "state": 1,
+            "sum": 3,
+        },
+        {
+            "start": period3,
+            "last_reset": None,
+            "state": 2,
+            "sum": 5,
+        },
+        {
+            "start": period4,
+            "last_reset": None,
+            "state": 3,
+            "sum": 8,
+        },
+    )
+    external_energy_metadata_1 = {
+        "has_mean": False,
+        "has_sum": True,
+        "name": "Total imported energy",
+        "source": "test",
+        "statistic_id": "test:total_energy_import_tariff_1",
+        "unit_of_measurement": "kWh",
+    }
+    external_energy_statistics_2 = (
+        {
+            "start": period1,
+            "last_reset": None,
+            "state": 0,
+            "sum": 20,
+        },
+        {
+            "start": period2,
+            "last_reset": None,
+            "state": 1,
+            "sum": 30,
+        },
+        {
+            "start": period3,
+            "last_reset": None,
+            "state": 2,
+            "sum": 50,
+        },
+        {
+            "start": period4,
+            "last_reset": None,
+            "state": 3,
+            "sum": 80,
+        },
+    )
+    external_energy_metadata_2 = {
+        "has_mean": False,
+        "has_sum": True,
+        "name": "Total imported energy",
+        "source": "test",
+        "statistic_id": "test:total_energy_import_tariff_2",
+        "unit_of_measurement": "kWh",
+    }
+
+    async_add_external_statistics(
+        hass, external_energy_metadata_1, external_energy_statistics_1
+    )
+    async_add_external_statistics(
+        hass, external_energy_metadata_2, external_energy_statistics_2
+    )
+    await async_wait_recording_done_without_instance(hass)
+
+    client = await hass_ws_client()
+    await client.send_json(
+        {
+            "id": 1,
+            "type": "energy/fossil_energy_consumption",
+            "start_time": now.isoformat(),
+            "end_time": later.isoformat(),
+            "energy_statistic_ids": [
+                "test:total_energy_import_tariff_1",
+                "test:total_energy_import_tariff_2",
+            ],
+            "co2_statistic_id": "test:co2_ratio_missing",
+            "period": "hour",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        period2.isoformat(): pytest.approx(33.0 - 22.0),
+        period3.isoformat(): pytest.approx(55.0 - 33.0),
+        period4.isoformat(): pytest.approx(88.0 - 55.0),
+    }
+
+    await client.send_json(
+        {
+            "id": 2,
+            "type": "energy/fossil_energy_consumption",
+            "start_time": now.isoformat(),
+            "end_time": later.isoformat(),
+            "energy_statistic_ids": [
+                "test:total_energy_import_tariff_1",
+                "test:total_energy_import_tariff_2",
+            ],
+            "co2_statistic_id": "test:co2_ratio_missing",
+            "period": "day",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        period2_day_start.isoformat(): pytest.approx(33.0 - 22.0),
+        period3.isoformat(): pytest.approx(55.0 - 33.0),
+        period4_day_start.isoformat(): pytest.approx(88.0 - 55.0),
+    }
+
+    await client.send_json(
+        {
+            "id": 3,
+            "type": "energy/fossil_energy_consumption",
+            "start_time": now.isoformat(),
+            "end_time": later.isoformat(),
+            "energy_statistic_ids": [
+                "test:total_energy_import_tariff_1",
+                "test:total_energy_import_tariff_2",
+            ],
+            "co2_statistic_id": "test:co2_ratio_missing",
+            "period": "month",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        period1.isoformat(): pytest.approx(33.0 - 22.0),
+        period3.isoformat(): pytest.approx((55.0 - 33.0) + (88.0 - 55.0)),
+    }
+
+
+@pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
+async def test_fossil_energy_consumption_hole(hass, hass_ws_client):
+    """Test fossil_energy_consumption when some data points lack sum."""
+    now = dt_util.utcnow()
+    later = dt_util.as_utc(dt_util.parse_datetime("2022-09-01 00:00:00"))
+
+    await hass.async_add_executor_job(init_recorder_component, hass)
+    await async_setup_component(hass, "history", {})
+    await async_setup_component(hass, "sensor", {})
+
+    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
+    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
+    period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
+    period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))
+
+    external_energy_statistics_1 = (
+        {
+            "start": period1,
+            "last_reset": None,
+            "state": 0,
+            "sum": None,
+        },
+        {
+            "start": period2,
+            "last_reset": None,
+            "state": 1,
+            "sum": 3,
+        },
+        {
+            "start": period3,
+            "last_reset": None,
+            "state": 2,
+            "sum": 5,
+        },
+        {
+            "start": period4,
+            "last_reset": None,
+            "state": 3,
+            "sum": 8,
+        },
+    )
+    external_energy_metadata_1 = {
+        "has_mean": False,
+        "has_sum": True,
+        "name": "Total imported energy",
+        "source": "test",
+        "statistic_id": "test:total_energy_import_tariff_1",
+        "unit_of_measurement": "kWh",
+    }
+    external_energy_statistics_2 = (
+        {
+            "start": period1,
+            "last_reset": None,
+            "state": 0,
+            "sum": 20,
+        },
+        {
+            "start": period2,
+            "last_reset": None,
+            "state": 1,
+            "sum": None,
+        },
+        {
+            "start": period3,
+            "last_reset": None,
+            "state": 2,
+            "sum": 50,
+        },
+        {
+            "start": period4,
+            "last_reset": None,
+            "state": 3,
+            "sum": 80,
+        },
+    )
+    external_energy_metadata_2 = {
+        "has_mean": False,
+        "has_sum": True,
+        "name": "Total imported energy",
+        "source": "test",
+        "statistic_id": "test:total_energy_import_tariff_2",
+        "unit_of_measurement": "kWh",
+    }
+
+    async_add_external_statistics(
+        hass, external_energy_metadata_1, external_energy_statistics_1
+    )
+    async_add_external_statistics(
+        hass, external_energy_metadata_2, external_energy_statistics_2
+    )
+    await async_wait_recording_done_without_instance(hass)
+
+    client = await hass_ws_client()
+    await client.send_json(
+        {
+            "id": 1,
+            "type": "energy/fossil_energy_consumption",
+            "start_time": now.isoformat(),
+            "end_time": later.isoformat(),
+            "energy_statistic_ids": [
+                "test:total_energy_import_tariff_1",
+                "test:total_energy_import_tariff_2",
+            ],
+            "co2_statistic_id": "test:co2_ratio_missing",
+            "period": "hour",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        period2.isoformat(): pytest.approx(3.0 - 20.0),
+        period3.isoformat(): pytest.approx(55.0 - 3.0),
+        period4.isoformat(): pytest.approx(88.0 - 55.0),
+    }
+
+
+@pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
+async def test_fossil_energy_consumption(hass, hass_ws_client):
+    """Test fossil_energy_consumption with co2 sensor data."""
+    now = dt_util.utcnow()
+    later = dt_util.as_utc(dt_util.parse_datetime("2022-09-01 00:00:00"))
+
+    await hass.async_add_executor_job(init_recorder_component, hass)
+    await async_setup_component(hass, "history", {})
+    await async_setup_component(hass, "sensor", {})
+
+    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
+    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
+    period2_day_start = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 00:00:00"))
+    period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
+    period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))
+    period4_day_start = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 00:00:00"))
+
+    external_energy_statistics_1 = (
+        {
+            "start": period1,
+            "last_reset": None,
+            "state": 0,
+            "sum": 2,
+        },
+        {
+            "start": period2,
+            "last_reset": None,
+            "state": 1,
+            "sum": 3,
+        },
+        {
+            "start": period3,
+            "last_reset": None,
+            "state": 2,
+            "sum": 4,
+        },
+        {
+            "start": period4,
+            "last_reset": None,
+            "state": 3,
+            "sum": 5,
+        },
+    )
+    external_energy_metadata_1 = {
+        "has_mean": False,
+        "has_sum": True,
+        "name": "Total imported energy",
+        "source": "test",
+        "statistic_id": "test:total_energy_import_tariff_1",
+        "unit_of_measurement": "kWh",
+    }
+    external_energy_statistics_2 = (
+        {
+            "start": period1,
+            "last_reset": None,
+            "state": 0,
+            "sum": 20,
+        },
+        {
+            "start": period2,
+            "last_reset": None,
+            "state": 1,
+            "sum": 30,
+        },
+        {
+            "start": period3,
+            "last_reset": None,
+            "state": 2,
+            "sum": 40,
+        },
+        {
+            "start": period4,
+            "last_reset": None,
+            "state": 3,
+            "sum": 50,
+        },
+    )
+    external_energy_metadata_2 = {
+        "has_mean": False,
+        "has_sum": True,
+        "name": "Total imported energy",
+        "source": "test",
+        "statistic_id": "test:total_energy_import_tariff_2",
+        "unit_of_measurement": "kWh",
+    }
+    external_co2_statistics = (
+        {
+            "start": period1,
+            "last_reset": None,
+            "mean": 10,
+        },
+        {
+            "start": period2,
+            "last_reset": None,
+            "mean": 30,
+        },
+        {
+            "start": period3,
+            "last_reset": None,
+            "mean": 60,
+        },
+        {
+            "start": period4,
+            "last_reset": None,
+            "mean": 90,
+        },
+    )
+    external_co2_metadata = {
+        "has_mean": True,
+        "has_sum": False,
+        "name": "Fossil percentage",
+        "source": "test",
+        "statistic_id": "test:fossil_percentage",
+        "unit_of_measurement": "%",
+    }
+
+    async_add_external_statistics(
+        hass, external_energy_metadata_1, external_energy_statistics_1
+    )
+    async_add_external_statistics(
+        hass, external_energy_metadata_2, external_energy_statistics_2
+    )
+    async_add_external_statistics(hass, external_co2_metadata, external_co2_statistics)
+    await async_wait_recording_done_without_instance(hass)
+
+    client = await hass_ws_client()
+    await client.send_json(
+        {
+            "id": 1,
+            "type": "energy/fossil_energy_consumption",
+            "start_time": now.isoformat(),
+            "end_time": later.isoformat(),
+            "energy_statistic_ids": [
+                "test:total_energy_import_tariff_1",
+                "test:total_energy_import_tariff_2",
+            ],
+            "co2_statistic_id": "test:fossil_percentage",
+            "period": "hour",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        period2.isoformat(): pytest.approx((33.0 - 22.0) * 0.3),
+        period3.isoformat(): pytest.approx((44.0 - 33.0) * 0.6),
+        period4.isoformat(): pytest.approx((55.0 - 44.0) * 0.9),
+    }
+
+    await client.send_json(
+        {
+            "id": 2,
+            "type": "energy/fossil_energy_consumption",
+            "start_time": now.isoformat(),
+            "end_time": later.isoformat(),
+            "energy_statistic_ids": [
+                "test:total_energy_import_tariff_1",
+                "test:total_energy_import_tariff_2",
+            ],
+            "co2_statistic_id": "test:fossil_percentage",
+            "period": "day",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        period2_day_start.isoformat(): pytest.approx((33.0 - 22.0) * 0.3),
+        period3.isoformat(): pytest.approx((44.0 - 33.0) * 0.6),
+        period4_day_start.isoformat(): pytest.approx((55.0 - 44.0) * 0.9),
+    }
+
+    await client.send_json(
+        {
+            "id": 3,
+            "type": "energy/fossil_energy_consumption",
+            "start_time": now.isoformat(),
+            "end_time": later.isoformat(),
+            "energy_statistic_ids": [
+                "test:total_energy_import_tariff_1",
+                "test:total_energy_import_tariff_2",
+            ],
+            "co2_statistic_id": "test:fossil_percentage",
+            "period": "month",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        period1.isoformat(): pytest.approx((33.0 - 22.0) * 0.3),
+        period3.isoformat(): pytest.approx(
+            ((44.0 - 33.0) * 0.6) + ((55.0 - 44.0) * 0.9)
+        ),
+    }
+
+
+async def test_fossil_energy_consumption_checks(hass, hass_ws_client):
+    """Test fossil_energy_consumption parameter validation."""
+    client = await hass_ws_client(hass)
+    now = dt_util.utcnow()
+
+    await client.send_json(
+        {
+            "id": 1,
+            "type": "energy/fossil_energy_consumption",
+            "start_time": "donald_duck",
+            "end_time": now.isoformat(),
+            "energy_statistic_ids": [
+                "test:total_energy_import_tariff_1",
+                "test:total_energy_import_tariff_2",
+            ],
+            "co2_statistic_id": "test:fossil_percentage",
+            "period": "hour",
+        }
+    )
+
+    msg = await client.receive_json()
+
+    assert msg["id"] == 1
+    assert not msg["success"]
+    assert msg["error"] == {
+        "code": "invalid_start_time",
+        "message": "Invalid start_time",
+    }
+
+    await client.send_json(
+        {
+            "id": 2,
+            "type": "energy/fossil_energy_consumption",
+            "start_time": now.isoformat(),
+            "end_time": "donald_duck",
+            "energy_statistic_ids": [
+                "test:total_energy_import_tariff_1",
+                "test:total_energy_import_tariff_2",
+            ],
+            "co2_statistic_id": "test:fossil_percentage",
+            "period": "hour",
+        }
+    )
+
+    msg = await client.receive_json()
+
+    assert msg["id"] == 2
+    assert not msg["success"]
+    assert msg["error"] == {"code": "invalid_end_time", "message": "Invalid end_time"}