Mirror of https://github.com/home-assistant/core.git
Tibber cost statistics (#63626)
* Tibber cost statistics
  Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
* Tibber cost statistics
  Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
* unit
  Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
* unit
  Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
* Update homeassistant/components/tibber/sensor.py
  Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
* Update homeassistant/components/tibber/sensor.py
  Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
* break lines
  Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
This commit is contained in:
parent 1d24fb7ad9, commit 0e0ef0aa17
homeassistant/components/tibber/sensor.py

@@ -561,67 +561,78 @@ class TibberDataCoordinator(update_coordinator.DataUpdateCoordinator):
         for home in self._tibber_connection.get_homes():
             if not home.hourly_consumption_data:
                 continue

-            statistic_id = (
-                f"{TIBBER_DOMAIN}:energy_consumption_{home.home_id.replace('-', '')}"
-            )
-
-            last_stats = await self.hass.async_add_executor_job(
-                get_last_statistics, self.hass, 1, statistic_id, True
-            )
-
-            if not last_stats:
-                # First time we insert 5 years of data (if available)
-                hourly_consumption_data = await home.get_historic_data(5 * 365 * 24)
-
-                _sum = 0
-                last_stats_time = None
-            else:
-                # hourly_consumption_data contains the last 30 days of consumption data.
-                # We update the statistics with the last 30 days of data to handle corrections in the data.
-                hourly_consumption_data = home.hourly_consumption_data
-
-                start = dt_util.parse_datetime(
-                    hourly_consumption_data[0]["from"]
-                ) - timedelta(hours=1)
-                stat = await self.hass.async_add_executor_job(
-                    statistics_during_period,
-                    self.hass,
-                    start,
-                    None,
-                    [statistic_id],
-                    "hour",
-                    True,
-                )
-                _sum = stat[statistic_id][0]["sum"]
-                last_stats_time = stat[statistic_id][0]["start"]
-
-            statistics = []
-
-            for data in hourly_consumption_data:
-                if data.get("consumption") is None:
-                    continue
-
-                start = dt_util.parse_datetime(data["from"])
-                if last_stats_time is not None and start <= last_stats_time:
-                    continue
-
-                _sum += data["consumption"]
-
-                statistics.append(
-                    StatisticData(
-                        start=start,
-                        state=data["consumption"],
-                        sum=_sum,
-                    )
-                )
-
-            metadata = StatisticMetaData(
-                has_mean=False,
-                has_sum=True,
-                name=f"{home.name} consumption",
-                source=TIBBER_DOMAIN,
-                statistic_id=statistic_id,
-                unit_of_measurement=ENERGY_KILO_WATT_HOUR,
-            )
-            async_add_external_statistics(self.hass, metadata, statistics)
+            for sensor_type in (
+                "consumption",
+                "totalCost",
+            ):
+                statistic_id = (
+                    f"{TIBBER_DOMAIN}:energy_"
+                    f"{sensor_type.lower()}_"
+                    f"{home.home_id.replace('-', '')}"
+                )
+
+                last_stats = await self.hass.async_add_executor_job(
+                    get_last_statistics, self.hass, 1, statistic_id, True
+                )
+
+                if not last_stats:
+                    # First time we insert 5 years of data (if available)
+                    hourly_consumption_data = await home.get_historic_data(5 * 365 * 24)
+
+                    _sum = 0
+                    last_stats_time = None
+                else:
+                    # hourly_consumption_data contains the last 30 days
+                    # of consumption data.
+                    # We update the statistics with the last 30 days
+                    # of data to handle corrections in the data.
+                    hourly_consumption_data = home.hourly_consumption_data
+
+                    start = dt_util.parse_datetime(
+                        hourly_consumption_data[0]["from"]
+                    ) - timedelta(hours=1)
+                    stat = await self.hass.async_add_executor_job(
+                        statistics_during_period,
+                        self.hass,
+                        start,
+                        None,
+                        [statistic_id],
+                        "hour",
+                        True,
+                    )
+                    _sum = stat[statistic_id][0]["sum"]
+                    last_stats_time = stat[statistic_id][0]["start"]
+
+                statistics = []
+
+                for data in hourly_consumption_data:
+                    if data.get(sensor_type) is None:
+                        continue
+
+                    start = dt_util.parse_datetime(data["from"])
+                    if last_stats_time is not None and start <= last_stats_time:
+                        continue
+
+                    _sum += data[sensor_type]
+
+                    statistics.append(
+                        StatisticData(
+                            start=start,
+                            state=data[sensor_type],
+                            sum=_sum,
+                        )
+                    )
+
+                if sensor_type == "consumption":
+                    unit = ENERGY_KILO_WATT_HOUR
+                else:
+                    unit = home.currency
+                metadata = StatisticMetaData(
+                    has_mean=False,
+                    has_sum=True,
+                    name=f"{home.name} {sensor_type}",
+                    source=TIBBER_DOMAIN,
+                    statistic_id=statistic_id,
+                    unit_of_measurement=unit,
+                )
+                async_add_external_statistics(self.hass, metadata, statistics)
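For readers skimming the diff: each home now gets two external statistic series, one per sensor type, and the unit switches between kWh and the home's currency. Below is a minimal standalone sketch of that id/unit derivation, not the integration itself; the home_id and currency values are taken from the mocked home in the tests, and TIBBER_DOMAIN / ENERGY_KILO_WATT_HOUR are assumed to be "tibber" and "kWh" as elsewhere in Home Assistant.

# Sketch only: mirrors the id/unit logic in the diff above with test-like values.
TIBBER_DOMAIN = "tibber"          # assumed, matches the ids asserted in the tests
ENERGY_KILO_WATT_HOUR = "kWh"     # assumed value of the Home Assistant constant

home_id = "home_id"               # from the mocked home; real ids are UUIDs with dashes
currency = "NOK"                  # would come from home.currency

for sensor_type in ("consumption", "totalCost"):
    statistic_id = (
        f"{TIBBER_DOMAIN}:energy_"
        f"{sensor_type.lower()}_"
        f"{home_id.replace('-', '')}"
    )
    unit = ENERGY_KILO_WATT_HOUR if sensor_type == "consumption" else currency
    print(statistic_id, unit)
# Prints:
#   tibber:energy_consumption_home_id kWh
#   tibber:energy_totalcost_home_id NOK

These are exactly the two ids the updated test below queries.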
@@ -33,7 +33,9 @@ async def test_async_setup_entry(hass):

 def _get_homes():
     tibber_home = AsyncMock()
+    tibber_home.name = "Name"
     tibber_home.home_id = "home_id"
+    tibber_home.currency = "NOK"
     tibber_home.get_historic_data.return_value = _CONSUMPTION_DATA_1
     return [tibber_home]

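The mocked home returns the _CONSUMPTION_DATA_1 fixture, whose definition sits outside this hunk. Judging from how the coordinator and the assertions below read it, each hourly entry carries a "from" timestamp plus "consumption" and "totalCost" values, and the fixture holds three hours of data. A purely hypothetical illustration of that shape (the timestamps and numbers are invented, not the real fixture):

# Hypothetical shape only; the real fixture is defined elsewhere in the test file.
_CONSUMPTION_DATA_1 = [
    {"from": "2022-01-03T00:00:00.000+01:00", "consumption": 1.25, "totalCost": 1.5},
    {"from": "2022-01-03T01:00:00.000+01:00", "consumption": 1.5, "totalCost": 2.0},
    {"from": "2022-01-03T02:00:00.000+01:00", "consumption": 2.25, "totalCost": 2.5},
]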
@@ -46,6 +48,7 @@ async def test_async_setup_entry(hass):
     await coordinator._async_update_data()
     await async_wait_recording_done_without_instance(hass)

+    # Validate consumption
     statistic_id = "tibber:energy_consumption_home_id"

     stats = await hass.async_add_executor_job(
@@ -71,3 +74,30 @@ async def test_async_setup_entry(hass):

         _sum += _CONSUMPTION_DATA_1[k]["consumption"]
         assert stat["sum"] == _sum
+
+    # Validate cost
+    statistic_id = "tibber:energy_totalcost_home_id"
+
+    stats = await hass.async_add_executor_job(
+        statistics_during_period,
+        hass,
+        dt_util.parse_datetime(_CONSUMPTION_DATA_1[0]["from"]),
+        None,
+        [statistic_id],
+        "hour",
+        True,
+    )
+
+    assert len(stats) == 1
+    assert len(stats[statistic_id]) == 3
+    _sum = 0
+    for k, stat in enumerate(stats[statistic_id]):
+        assert stat["start"] == dt_util.parse_datetime(_CONSUMPTION_DATA_1[k]["from"])
+        assert stat["state"] == _CONSUMPTION_DATA_1[k]["totalCost"]
+        assert stat["mean"] is None
+        assert stat["min"] is None
+        assert stat["max"] is None
+        assert stat["last_reset"] is None
+
+        _sum += _CONSUMPTION_DATA_1[k]["totalCost"]
+        assert stat["sum"] == _sum