Convert history_stats to use a coordinator (#70388)
Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
parent c1d2017988
commit 1d2c949c51
homeassistant/components/history_stats/coordinator.py (new file, 96 lines)
@@ -0,0 +1,96 @@
"""History stats data coordinator."""
from __future__ import annotations

from datetime import timedelta
import logging
from typing import Any

from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback
from homeassistant.exceptions import TemplateError
from homeassistant.helpers.event import async_track_state_change_event
from homeassistant.helpers.start import async_at_start
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .data import HistoryStats, HistoryStatsState

_LOGGER = logging.getLogger(__name__)

UPDATE_INTERVAL = timedelta(minutes=1)


class HistoryStatsUpdateCoordinator(DataUpdateCoordinator):
    """DataUpdateCoordinator to gather data for a history stats sensor."""

    data: HistoryStatsState

    def __init__(
        self,
        hass: HomeAssistant,
        history_stats: HistoryStats,
        name: str,
    ) -> None:
        """Initialize DataUpdateCoordinator."""
        self._history_stats = history_stats
        self._subscriber_count = 0
        self._at_start_listener: CALLBACK_TYPE | None = None
        self._track_events_listener: CALLBACK_TYPE | None = None
        super().__init__(
            hass,
            _LOGGER,
            name=name,
            update_interval=UPDATE_INTERVAL,
        )

    @callback
    def async_setup_state_listener(self) -> CALLBACK_TYPE:
        """Set up listeners and return a callback to cancel them."""

        @callback
        def remove_listener() -> None:
            """Remove update listener."""
            self._subscriber_count -= 1
            if self._subscriber_count == 0:
                self._async_remove_listener()

        if self._subscriber_count == 0:
            self._async_add_listener()
        self._subscriber_count += 1

        return remove_listener

    @callback
    def _async_remove_listener(self) -> None:
        """Remove state change listener."""
        if self._track_events_listener:
            self._track_events_listener()
            self._track_events_listener = None
        if self._at_start_listener:
            self._at_start_listener()
            self._at_start_listener = None

    @callback
    def _async_add_listener(self) -> None:
        """Add a listener to start tracking state changes after start."""
        self._at_start_listener = async_at_start(
            self.hass, self._async_add_events_listener
        )

    @callback
    def _async_add_events_listener(self, *_: Any) -> None:
        """Handle hass starting and start tracking events."""
        self._at_start_listener = None
        self._track_events_listener = async_track_state_change_event(
            self.hass, [self._history_stats.entity_id], self._async_update_from_event
        )

    async def _async_update_from_event(self, event: Event) -> None:
        """Process an update from an event."""
        self.async_set_updated_data(await self._history_stats.async_update(event))

    async def _async_update_data(self) -> HistoryStatsState:
        """Fetch the latest history stats state."""
        try:
            return await self._history_stats.async_update(None)
        except (TemplateError, TypeError, ValueError) as ex:
            raise UpdateFailed(ex) from ex
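For orientation, an illustrative sketch (not part of the commit) of how an entity hooks into this coordinator, mirroring the sensor code further down in this diff; the class name here is hypothetical.

from homeassistant.helpers.update_coordinator import CoordinatorEntity

class ExampleHistoryStatsEntity(CoordinatorEntity[HistoryStatsUpdateCoordinator]):
    """Hypothetical subscriber, shown only to illustrate the listener lifecycle."""

    async def async_added_to_hass(self) -> None:
        """Subscribe when the entity is added to Home Assistant."""
        await super().async_added_to_hass()
        # async_setup_state_listener() increments the coordinator's subscriber
        # count and starts state tracking for the first subscriber; the returned
        # callback decrements the count and tears the listener down at zero.
        self.async_on_remove(self.coordinator.async_setup_state_listener())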
homeassistant/components/history_stats/data.py (new file, 175 lines)
@@ -0,0 +1,175 @@
"""Manage the history_stats data."""
from __future__ import annotations

from dataclasses import dataclass
import datetime

from homeassistant.components.recorder import get_instance, history
from homeassistant.core import Event, HomeAssistant, State
from homeassistant.helpers.template import Template
import homeassistant.util.dt as dt_util

from .helpers import async_calculate_period, floored_timestamp


@dataclass
class HistoryStatsState:
    """The current stats of the history stats."""

    hours_matched: float | None
    changes_to_match_state: int | None
    period: tuple[datetime.datetime, datetime.datetime]


class HistoryStats:
    """Manage history stats."""

    def __init__(
        self,
        hass: HomeAssistant,
        entity_id: str,
        entity_states: list[str],
        start: Template | None,
        end: Template | None,
        duration: datetime.timedelta | None,
    ) -> None:
        """Init the history stats manager."""
        self.hass = hass
        self.entity_id = entity_id
        self._period = (datetime.datetime.min, datetime.datetime.min)
        self._state: HistoryStatsState = HistoryStatsState(None, None, self._period)
        self._history_current_period: list[State] = []
        self._previous_run_before_start = False
        self._entity_states = set(entity_states)
        self._duration = duration
        self._start = start
        self._end = end

    async def async_update(self, event: Event | None) -> HistoryStatsState:
        """Update the stats at a given time."""
        # Get previous values of start and end
        previous_period_start, previous_period_end = self._period
        # Parse templates
        self._period = async_calculate_period(self._duration, self._start, self._end)
        # Get the current period
        current_period_start, current_period_end = self._period

        # Convert times to UTC
        current_period_start = dt_util.as_utc(current_period_start)
        current_period_end = dt_util.as_utc(current_period_end)
        previous_period_start = dt_util.as_utc(previous_period_start)
        previous_period_end = dt_util.as_utc(previous_period_end)

        # Compute integer timestamps
        current_period_start_timestamp = floored_timestamp(current_period_start)
        current_period_end_timestamp = floored_timestamp(current_period_end)
        previous_period_start_timestamp = floored_timestamp(previous_period_start)
        previous_period_end_timestamp = floored_timestamp(previous_period_end)
        now_timestamp = floored_timestamp(datetime.datetime.now())

        if now_timestamp < current_period_start_timestamp:
            # History cannot tell the future
            self._history_current_period = []
            self._previous_run_before_start = True

        #
        # We avoid querying the database if the below did NOT happen:
        #
        # - The previous run happened before the start time
        # - The start time changed
        # - The period shrank in size
        # - The previous period ended before now
        #
        elif (
            not self._previous_run_before_start
            and current_period_start_timestamp == previous_period_start_timestamp
            and (
                current_period_end_timestamp == previous_period_end_timestamp
                or (
                    current_period_end_timestamp >= previous_period_end_timestamp
                    and previous_period_end_timestamp <= now_timestamp
                )
            )
        ):
            new_data = False
            if event and event.data["new_state"] is not None:
                new_state: State = event.data["new_state"]
                if current_period_start <= new_state.last_changed <= current_period_end:
                    self._history_current_period.append(new_state)
                    new_data = True
            if not new_data and current_period_end_timestamp < now_timestamp:
                # If period has not changed and current time after the period end...
                # Don't compute anything as the value cannot have changed
                return self._state
        else:
            self._history_current_period = await get_instance(
                self.hass
            ).async_add_executor_job(
                self._update_from_database,
                current_period_start,
                current_period_end,
            )
            self._previous_run_before_start = False

        if not self._history_current_period:
            self._state = HistoryStatsState(None, None, self._period)
            return self._state

        hours_matched, changes_to_match_state = self._async_compute_hours_and_changes(
            now_timestamp,
            current_period_start_timestamp,
            current_period_end_timestamp,
        )
        self._state = HistoryStatsState(
            hours_matched, changes_to_match_state, self._period
        )
        return self._state

    def _update_from_database(
        self, start: datetime.datetime, end: datetime.datetime
    ) -> list[State]:
        return history.state_changes_during_period(
            self.hass,
            start,
            end,
            self.entity_id,
            include_start_time_state=True,
            no_attributes=True,
        ).get(self.entity_id, [])

    def _async_compute_hours_and_changes(
        self, now_timestamp: float, start_timestamp: float, end_timestamp: float
    ) -> tuple[float, int]:
        """Compute the hours matched and changes from the history list and first state."""
        # state_changes_during_period is called with include_start_time_state=True
        # which is the default and always provides the state at the start
        # of the period
        previous_state_matches = (
            self._history_current_period
            and self._history_current_period[0].state in self._entity_states
        )
        last_state_change_timestamp = start_timestamp
        elapsed = 0.0
        changes_to_match_state = 0

        # Make calculations
        for item in self._history_current_period:
            current_state_matches = item.state in self._entity_states
            state_change_timestamp = item.last_changed.timestamp()

            if previous_state_matches:
                elapsed += state_change_timestamp - last_state_change_timestamp
            elif current_state_matches:
                changes_to_match_state += 1

            previous_state_matches = current_state_matches
            last_state_change_timestamp = state_change_timestamp

        # Count time elapsed between last history state and end of measure
        if previous_state_matches:
            measure_end = min(end_timestamp, now_timestamp)
            elapsed += measure_end - last_state_change_timestamp

        # Save value in hours
        hours_matched = elapsed / 3600
        return hours_matched, changes_to_match_state
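A small self-contained example (not part of the commit) of the bookkeeping that _async_compute_hours_and_changes performs, reduced to plain (timestamp, state) pairs; the function name and values below are made up for illustration.

def hours_and_changes(history, match, start_ts, end_ts, now_ts):
    """Mirror of the elapsed-time/changes loop above, on (timestamp, state) tuples."""
    previous_matches = bool(history) and history[0][1] == match
    last_ts, elapsed, changes = start_ts, 0.0, 0
    for ts, state in history:
        matches = state == match
        if previous_matches:
            elapsed += ts - last_ts  # time spent in the matched state
        elif matches:
            changes += 1  # a change into the matched state
        previous_matches, last_ts = matches, ts
    if previous_matches:
        elapsed += min(end_ts, now_ts) - last_ts  # tail up to the end of the period
    return elapsed / 3600, changes

# hours_and_changes([(600, "off"), (1200, "on"), (2400, "off")], "on", 0, 3600, 3600)
# returns (0.333..., 1): one switch into "on", matched for 1200 seconds of the hour.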
@@ -13,51 +13,51 @@ import homeassistant.util.dt as dt_util
 _LOGGER = logging.getLogger(__name__)
 
+DURATION_START = "start"
+DURATION_END = "end"
+
 
 @callback
 def async_calculate_period(
     duration: datetime.timedelta | None,
     start_template: Template | None,
     end_template: Template | None,
-) -> tuple[datetime.datetime, datetime.datetime] | None:
+) -> tuple[datetime.datetime, datetime.datetime]:
     """Parse the templates and return the period."""
-    start: datetime.datetime | None = None
-    end: datetime.datetime | None = None
-
-    # Parse start
-    if start_template is not None:
-        try:
-            start_rendered = start_template.async_render()
-        except (TemplateError, TypeError) as ex:
-            HistoryStatsHelper.handle_template_exception(ex, "start")
-            return None
-        if isinstance(start_rendered, str):
-            start = dt_util.parse_datetime(start_rendered)
-        if start is None:
-            try:
-                start = dt_util.as_local(
-                    dt_util.utc_from_timestamp(math.floor(float(start_rendered)))
-                )
-            except ValueError:
-                _LOGGER.error("Parsing error: start must be a datetime or a timestamp")
-                return None
-
-    # Parse end
-    if end_template is not None:
-        try:
-            end_rendered = end_template.async_render()
-        except (TemplateError, TypeError) as ex:
-            HistoryStatsHelper.handle_template_exception(ex, "end")
-            return None
-        if isinstance(end_rendered, str):
-            end = dt_util.parse_datetime(end_rendered)
-        if end is None:
-            try:
-                end = dt_util.as_local(
-                    dt_util.utc_from_timestamp(math.floor(float(end_rendered)))
-                )
-            except ValueError:
-                _LOGGER.error("Parsing error: end must be a datetime or a timestamp")
-                return None
+    bounds: dict[str, datetime.datetime | None] = {
+        DURATION_START: None,
+        DURATION_END: None,
+    }
+    for bound, template in (
+        (DURATION_START, start_template),
+        (DURATION_END, end_template),
+    ):
+        # Parse start
+        if template is None:
+            continue
+        try:
+            rendered = template.async_render()
+        except (TemplateError, TypeError) as ex:
+            if ex.args and not ex.args[0].startswith(
+                "UndefinedError: 'None' has no attribute"
+            ):
+                _LOGGER.error("Error parsing template for field %s", bound, exc_info=ex)
+            raise
+        if isinstance(rendered, str):
+            bounds[bound] = dt_util.parse_datetime(rendered)
+        if bounds[bound] is not None:
+            continue
+        try:
+            bounds[bound] = dt_util.as_local(
+                dt_util.utc_from_timestamp(math.floor(float(rendered)))
+            )
+        except ValueError as ex:
+            raise ValueError(
+                f"Parsing error: {bound} must be a datetime or a timestamp: {ex}"
+            ) from ex
+
+    start = bounds[DURATION_START]
+    end = bounds[DURATION_END]
 
     # Calculate start or end using the duration
     if start is None:
@@ -72,39 +72,28 @@ def async_calculate_period(
     return start, end
 
 
-class HistoryStatsHelper:
-    """Static methods to make the HistoryStatsSensor code lighter."""
+def pretty_duration(hours: float) -> str:
+    """Format a duration in days, hours, minutes, seconds."""
+    seconds = int(3600 * hours)
+    days, seconds = divmod(seconds, 86400)
+    hours, seconds = divmod(seconds, 3600)
+    minutes, seconds = divmod(seconds, 60)
+    if days > 0:
+        return "%dd %dh %dm" % (days, hours, minutes)
+    if hours > 0:
+        return "%dh %dm" % (hours, minutes)
+    return "%dm" % minutes
 
-    @staticmethod
-    def pretty_duration(hours):
-        """Format a duration in days, hours, minutes, seconds."""
-        seconds = int(3600 * hours)
-        days, seconds = divmod(seconds, 86400)
-        hours, seconds = divmod(seconds, 3600)
-        minutes, seconds = divmod(seconds, 60)
-        if days > 0:
-            return "%dd %dh %dm" % (days, hours, minutes)
-        if hours > 0:
-            return "%dh %dm" % (hours, minutes)
-        return "%dm" % minutes
 
-    @staticmethod
-    def pretty_ratio(value, period):
-        """Format the ratio of value / period duration."""
-        if len(period) != 2 or period[0] == period[1]:
-            return 0.0
+def pretty_ratio(
+    value: float, period: tuple[datetime.datetime, datetime.datetime]
+) -> float:
+    """Format the ratio of value / period duration."""
+    if len(period) != 2 or period[0] == period[1]:
+        return 0.0
 
-        ratio = 100 * 3600 * value / (period[1] - period[0]).total_seconds()
-        return round(ratio, 1)
-
-    @staticmethod
-    def handle_template_exception(ex, field):
-        """Log an error nicely if the template cannot be interpreted."""
-        if ex.args and ex.args[0].startswith("UndefinedError: 'None' has no attribute"):
-            # Common during HA startup - so just a warning
-            _LOGGER.warning(ex)
-            return
-        _LOGGER.error("Error parsing template for field %s", field, exc_info=ex)
+    ratio = 100 * 3600 * value / (period[1] - period[0]).total_seconds()
+    return round(ratio, 1)
 
 
 def floored_timestamp(incoming_dt: datetime.datetime) -> float:
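Quick usage illustration for the now module-level helpers (not part of the commit; values chosen only for the example):

# pretty_duration(1.75) == "1h 45m"
# pretty_ratio(0.5, (start, end)) == 50.0 when start and end are one hour apart,
# i.e. the matched 0.5 h is 50 percent of the period.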
@@ -1,11 +1,11 @@
 """Component to make instant statistics about your history."""
 from __future__ import annotations
 
+from abc import abstractmethod
 import datetime
 
 import voluptuous as vol
 
-from homeassistant.components.recorder import get_instance, history
 from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
 from homeassistant.const import (
     CONF_ENTITY_ID,
@@ -15,18 +15,18 @@ from homeassistant.const import (
     PERCENTAGE,
     TIME_HOURS,
 )
-from homeassistant.core import Event, HomeAssistant, State, callback
+from homeassistant.core import HomeAssistant, callback
 import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.helpers.event import async_track_state_change_event
 from homeassistant.helpers.reload import async_setup_reload_service
-from homeassistant.helpers.start import async_at_start
 from homeassistant.helpers.template import Template
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
-import homeassistant.util.dt as dt_util
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
 from . import DOMAIN, PLATFORMS
-from .helpers import HistoryStatsHelper, async_calculate_period, floored_timestamp
+from .coordinator import HistoryStatsUpdateCoordinator
+from .data import HistoryStats
+from .helpers import pretty_duration, pretty_ratio
 
 CONF_START = "start"
 CONF_END = "end"
@@ -96,214 +96,72 @@ async def async_setup_platform(
     if template is not None:
         template.hass = hass
 
-    async_add_entities(
-        [
-            HistoryStatsSensor(
-                entity_id, entity_states, start, end, duration, sensor_type, name
-            )
-        ]
-    )
+    history_stats = HistoryStats(hass, entity_id, entity_states, start, end, duration)
+    coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, name)
+    async_add_entities([HistoryStatsSensor(coordinator, sensor_type, name)])
 
 
-class HistoryStatsSensor(SensorEntity):
-    """Representation of a HistoryStats sensor."""
+class HistoryStatsSensorBase(
+    CoordinatorEntity[HistoryStatsUpdateCoordinator], SensorEntity
+):
+    """Base class for a HistoryStats sensor."""
 
     _attr_icon = ICON
 
     def __init__(
         self,
-        entity_id: str,
-        entity_states: list[str],
-        start: Template | None,
-        end: Template | None,
-        duration: datetime.timedelta | None,
+        coordinator: HistoryStatsUpdateCoordinator,
+        name: str,
+    ) -> None:
+        """Initialize the HistoryStats sensor base class."""
+        super().__init__(coordinator)
+        self._attr_name = name
+
+    async def async_added_to_hass(self) -> None:
+        """Entity has been added to hass."""
+        await super().async_added_to_hass()
+        self.async_on_remove(self.coordinator.async_setup_state_listener())
+
+    def _handle_coordinator_update(self) -> None:
+        """Set attrs from value and count."""
+        self._process_update()
+        super()._handle_coordinator_update()
+
+    @callback
+    @abstractmethod
+    def _process_update(self) -> None:
+        """Process an update from the coordinator."""
+
+
+class HistoryStatsSensor(HistoryStatsSensorBase):
+    """A HistoryStats sensor."""
+
+    def __init__(
+        self,
+        coordinator: HistoryStatsUpdateCoordinator,
         sensor_type: str,
         name: str,
     ) -> None:
         """Initialize the HistoryStats sensor."""
-        self._attr_name = name
+        super().__init__(coordinator, name)
         self._attr_native_unit_of_measurement = UNITS[sensor_type]
-
-        self._entity_id = entity_id
-        self._entity_states = set(entity_states)
-        self._duration = duration
-        self._start = start
-        self._end = end
         self._type = sensor_type
-        self._period = (datetime.datetime.min, datetime.datetime.min)
-
-        self._history_current_period: list[State] = []
-        self._previous_run_before_start = False
 
     @callback
-    def _async_start_refresh(self, *_) -> None:
-        """Register state tracking."""
-        self.async_schedule_update_ha_state(True)
-        self.async_on_remove(
-            async_track_state_change_event(
-                self.hass, [self._entity_id], self._async_update_from_event
-            )
-        )
-
-    async def async_added_to_hass(self):
-        """Create listeners when the entity is added."""
-        self.async_on_remove(async_at_start(self.hass, self._async_start_refresh))
-
-    async def async_update(self) -> None:
-        """Get the latest data and updates the states."""
-        await self._async_update(None)
-
-    async def _async_update_from_event(self, event: Event) -> None:
-        """Do an update and write the state if its changed."""
-        await self._async_update(event)
-        self.async_write_ha_state()
-
-    async def _async_update(self, event: Event | None) -> None:
-        """Process an update."""
-        # Get previous values of start and end
-        previous_period_start, previous_period_end = self._period
-        # Parse templates
-        self.update_period()
-        current_period_start, current_period_end = self._period
-
-        # Convert times to UTC
-        current_period_start = dt_util.as_utc(current_period_start)
-        current_period_end = dt_util.as_utc(current_period_end)
-        previous_period_start = dt_util.as_utc(previous_period_start)
-        previous_period_end = dt_util.as_utc(previous_period_end)
-
-        # Compute integer timestamps
-        current_period_start_timestamp = floored_timestamp(current_period_start)
-        current_period_end_timestamp = floored_timestamp(current_period_end)
-        previous_period_start_timestamp = floored_timestamp(previous_period_start)
-        previous_period_end_timestamp = floored_timestamp(previous_period_end)
-        now_timestamp = floored_timestamp(datetime.datetime.now())
-
-        if now_timestamp < current_period_start_timestamp:
-            # History cannot tell the future
-            self._history_current_period = []
-            self._previous_run_before_start = True
-        #
-        # We avoid querying the database if the below did NOT happen:
-        #
-        # - The previous run happened before the start time
-        # - The start time changed
-        # - The period shrank in size
-        # - The previous period ended before now
-        #
-        elif (
-            not self._previous_run_before_start
-            and current_period_start_timestamp == previous_period_start_timestamp
-            and (
-                current_period_end_timestamp == previous_period_end_timestamp
-                or (
-                    current_period_end_timestamp >= previous_period_end_timestamp
-                    and previous_period_end_timestamp <= now_timestamp
-                )
-            )
-        ):
-            new_data = False
-            if event and event.data["new_state"] is not None:
-                new_state: State = event.data["new_state"]
-                if current_period_start <= new_state.last_changed <= current_period_end:
-                    self._history_current_period.append(new_state)
-                    new_data = True
-            if not new_data and current_period_end_timestamp < now_timestamp:
-                # If period has not changed and current time after the period end...
-                # Don't compute anything as the value cannot have changed
-                return
-        else:
-            self._history_current_period = await get_instance(
-                self.hass
-            ).async_add_executor_job(
-                self._update_from_database,
-                current_period_start,
-                current_period_end,
-            )
-            self._previous_run_before_start = False
-
-        if not self._history_current_period:
-            self._async_set_native_value(None, None)
-            return
-
-        hours_matched, changes_to_match_state = self._async_compute_hours_and_changes(
-            now_timestamp,
-            current_period_start_timestamp,
-            current_period_end_timestamp,
-        )
-        self._async_set_native_value(hours_matched, changes_to_match_state)
-
-    def _update_from_database(
-        self, start: datetime.datetime, end: datetime.datetime
-    ) -> list[State]:
-        return history.state_changes_during_period(
-            self.hass,
-            start,
-            end,
-            self._entity_id,
-            include_start_time_state=True,
-            no_attributes=True,
-        ).get(self._entity_id, [])
-
-    def _async_compute_hours_and_changes(
-        self, now_timestamp: float, start_timestamp: float, end_timestamp: float
-    ) -> tuple[float, int]:
-        """Compute the hours matched and changes from the history list and first state."""
-        # state_changes_during_period is called with include_start_time_state=True
-        # which is the default and always provides the state at the start
-        # of the period
-        previous_state_matches = (
-            self._history_current_period
-            and self._history_current_period[0].state in self._entity_states
-        )
-        last_state_change_timestamp = start_timestamp
-        elapsed = 0.0
-        changes_to_match_state = 0
-
-        # Make calculations
-        for item in self._history_current_period:
-            current_state_matches = item.state in self._entity_states
-            state_change_timestamp = item.last_changed.timestamp()
-
-            if previous_state_matches:
-                elapsed += state_change_timestamp - last_state_change_timestamp
-            elif current_state_matches:
-                changes_to_match_state += 1
-
-            previous_state_matches = current_state_matches
-            last_state_change_timestamp = state_change_timestamp
-
-        # Count time elapsed between last history state and end of measure
-        if previous_state_matches:
-            measure_end = min(end_timestamp, now_timestamp)
-            elapsed += measure_end - last_state_change_timestamp
-
-        # Save value in hours
-        hours_matched = elapsed / 3600
-        return hours_matched, changes_to_match_state
-
-    def _async_set_native_value(
-        self, hours_matched: float | None, changes_to_match_state: int | None
-    ) -> None:
-        """Set attrs from value and count."""
-        if hours_matched is None:
+    def _process_update(self) -> None:
+        """Process an update from the coordinator."""
+        state = self.coordinator.data
+        if state is None or state.hours_matched is None:
             self._attr_native_value = None
             self._attr_extra_state_attributes = {}
             return
 
         if self._type == CONF_TYPE_TIME:
-            self._attr_native_value = round(hours_matched, 2)
+            self._attr_native_value = round(state.hours_matched, 2)
         elif self._type == CONF_TYPE_RATIO:
-            self._attr_native_value = HistoryStatsHelper.pretty_ratio(
-                hours_matched, self._period
-            )
+            self._attr_native_value = pretty_ratio(state.hours_matched, state.period)
         elif self._type == CONF_TYPE_COUNT:
-            self._attr_native_value = changes_to_match_state
+            self._attr_native_value = state.changes_to_match_state
         self._attr_extra_state_attributes = {
-            ATTR_VALUE: HistoryStatsHelper.pretty_duration(hours_matched)
+            ATTR_VALUE: pretty_duration(state.hours_matched)
         }
-
-    def update_period(self) -> None:
-        """Parse the templates and store a datetime tuple in _period."""
-        if new_period := async_calculate_period(self._duration, self._start, self._end):
-            self._period = new_period
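Sketch of how the new class split is meant to be extended (not part of the commit; the class below is hypothetical): HistoryStatsSensorBase handles coordinator refreshes and delegates to the abstract _process_update(), so a variant sensor only needs to override that hook.

class HypotheticalHistoryStatsHours(HistoryStatsSensorBase):
    """Hypothetical sensor exposing raw matched hours, for illustration only."""

    @callback
    def _process_update(self) -> None:
        """Copy the coordinator's latest HistoryStatsState into the entity."""
        state = self.coordinator.data
        self._attr_native_value = None if state is None else state.hours_matched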
@@ -1,6 +1,6 @@
 """The test for the History Statistics sensor platform."""
 # pylint: disable=protected-access
-from datetime import datetime, timedelta
+from datetime import timedelta
 import unittest
 from unittest.mock import patch
 
@@ -9,11 +9,9 @@ import pytest
 
 from homeassistant import config as hass_config
 from homeassistant.components.history_stats import DOMAIN
-from homeassistant.components.history_stats.sensor import HistoryStatsSensor
-from homeassistant.const import SERVICE_RELOAD, STATE_UNKNOWN
+from homeassistant.const import SERVICE_RELOAD, STATE_UNAVAILABLE, STATE_UNKNOWN
 import homeassistant.core as ha
 from homeassistant.helpers.entity_component import async_update_entity
-from homeassistant.helpers.template import Template
 from homeassistant.setup import async_setup_component, setup_component
 import homeassistant.util.dt as dt_util
 
@@ -75,73 +73,6 @@ class TestHistoryStatsSensor(unittest.TestCase):
         state = self.hass.states.get("sensor.test")
         assert state.state == STATE_UNKNOWN
 
-    @patch(
-        "homeassistant.helpers.template.TemplateEnvironment.is_safe_callable",
-        return_value=True,
-    )
-    def test_period_parsing(self, mock):
-        """Test the conversion from templates to period."""
-        now = datetime(2019, 1, 1, 23, 30, 0, tzinfo=dt_util.UTC)
-        with patch("homeassistant.util.dt.now", return_value=now):
-            today = Template(
-                "{{ now().replace(hour=0).replace(minute=0).replace(second=0) }}",
-                self.hass,
-            )
-            duration = timedelta(hours=2, minutes=1)
-
-            sensor1 = HistoryStatsSensor(
-                "test", "on", today, None, duration, "time", "test"
-            )
-            sensor1.hass = self.hass
-            sensor2 = HistoryStatsSensor(
-                "test", "on", None, today, duration, "time", "test"
-            )
-            sensor2.hass = self.hass
-
-            sensor1.update_period()
-            sensor1_start, sensor1_end = sensor1._period
-            sensor2.update_period()
-            sensor2_start, sensor2_end = sensor2._period
-
-        # Start = 00:00:00
-        assert sensor1_start.hour == 0
-        assert sensor1_start.minute == 0
-        assert sensor1_start.second == 0
-
-        # End = 02:01:00
-        assert sensor1_end.hour == 2
-        assert sensor1_end.minute == 1
-        assert sensor1_end.second == 0
-
-        # Start = 21:59:00
-        assert sensor2_start.hour == 21
-        assert sensor2_start.minute == 59
-        assert sensor2_start.second == 0
-
-        # End = 00:00:00
-        assert sensor2_end.hour == 0
-        assert sensor2_end.minute == 0
-        assert sensor2_end.second == 0
-
-    def test_wrong_date(self):
-        """Test when start or end value is not a timestamp or a date."""
-        good = Template("{{ now() }}", self.hass)
-        bad = Template("{{ TEST }}", self.hass)
-
-        sensor1 = HistoryStatsSensor("test", "on", good, bad, None, "time", "Test")
-        sensor1.hass = self.hass
-        sensor2 = HistoryStatsSensor("test", "on", bad, good, None, "time", "Test")
-        sensor2.hass = self.hass
-
-        before_update1 = sensor1._period
-        before_update2 = sensor2._period
-
-        sensor1.update_period()
-        sensor2.update_period()
-
-        assert before_update1 == sensor1._period
-        assert before_update2 == sensor2._period
-
     def test_wrong_duration(self):
         """Test when duration value is not a timedelta."""
         self.init_recorder()
@@ -161,25 +92,6 @@ class TestHistoryStatsSensor(unittest.TestCase):
         with pytest.raises(TypeError):
             setup_component(self.hass, "sensor", config)()
 
-    def test_bad_template(self):
-        """Test Exception when the template cannot be parsed."""
-        bad = Template("{{ x - 12 }}", self.hass)  # x is undefined
-        duration = "01:00"
-
-        sensor1 = HistoryStatsSensor("test", "on", bad, None, duration, "time", "Test")
-        sensor1.hass = self.hass
-        sensor2 = HistoryStatsSensor("test", "on", None, bad, duration, "time", "Test")
-        sensor2.hass = self.hass
-
-        before_update1 = sensor1._period
-        before_update2 = sensor2._period
-
-        sensor1.update_period()
-        sensor2.update_period()
-
-        assert before_update1 == sensor1._period
-        assert before_update2 == sensor2._period
-
     def test_not_enough_arguments(self):
         """Test config when not enough arguments provided."""
         self.init_recorder()
@@ -224,6 +136,106 @@
         self.hass.start()
 
 
+async def test_invalid_date_for_start(hass, recorder_mock):
+    """Verify with an invalid date for start."""
+    await async_setup_component(
+        hass,
+        "sensor",
+        {
+            "sensor": {
+                "platform": "history_stats",
+                "entity_id": "binary_sensor.test_id",
+                "name": "test",
+                "state": "on",
+                "start": "{{ INVALID }}",
+                "duration": "01:00",
+            },
+        },
+    )
+    await hass.async_block_till_done()
+    assert hass.states.get("sensor.test").state == STATE_UNKNOWN
+    next_update_time = dt_util.utcnow() + timedelta(minutes=1)
+    with freeze_time(next_update_time):
+        async_fire_time_changed(hass, next_update_time)
+        await hass.async_block_till_done()
+    assert hass.states.get("sensor.test").state == STATE_UNAVAILABLE
+
+
+async def test_invalid_date_for_end(hass, recorder_mock):
+    """Verify with an invalid date for end."""
+    await async_setup_component(
+        hass,
+        "sensor",
+        {
+            "sensor": {
+                "platform": "history_stats",
+                "entity_id": "binary_sensor.test_id",
+                "name": "test",
+                "state": "on",
+                "end": "{{ INVALID }}",
+                "duration": "01:00",
+            },
+        },
+    )
+    await hass.async_block_till_done()
+    assert hass.states.get("sensor.test").state == STATE_UNKNOWN
+    next_update_time = dt_util.utcnow() + timedelta(minutes=1)
+    with freeze_time(next_update_time):
+        async_fire_time_changed(hass, next_update_time)
+        await hass.async_block_till_done()
+    assert hass.states.get("sensor.test").state == STATE_UNAVAILABLE
+
+
+async def test_invalid_entity_in_template(hass, recorder_mock):
+    """Verify with an invalid entity in the template."""
+    await async_setup_component(
+        hass,
+        "sensor",
+        {
+            "sensor": {
+                "platform": "history_stats",
+                "entity_id": "binary_sensor.test_id",
+                "name": "test",
+                "state": "on",
+                "end": "{{ states('binary_sensor.invalid').attributes.time }}",
+                "duration": "01:00",
+            },
+        },
+    )
+    await hass.async_block_till_done()
+    assert hass.states.get("sensor.test").state == STATE_UNKNOWN
+    next_update_time = dt_util.utcnow() + timedelta(minutes=1)
+    with freeze_time(next_update_time):
+        async_fire_time_changed(hass, next_update_time)
+        await hass.async_block_till_done()
+    assert hass.states.get("sensor.test").state == STATE_UNAVAILABLE
+
+
+async def test_invalid_entity_returning_none_in_template(hass, recorder_mock):
+    """Verify with an invalid entity returning none in the template."""
+    await async_setup_component(
+        hass,
+        "sensor",
+        {
+            "sensor": {
+                "platform": "history_stats",
+                "entity_id": "binary_sensor.test_id",
+                "name": "test",
+                "state": "on",
+                "end": "{{ states.binary_sensor.invalid.attributes.time }}",
+                "duration": "01:00",
+            },
+        },
+    )
+    await hass.async_block_till_done()
+    assert hass.states.get("sensor.test").state == STATE_UNKNOWN
+    next_update_time = dt_util.utcnow() + timedelta(minutes=1)
+    with freeze_time(next_update_time):
+        async_fire_time_changed(hass, next_update_time)
+        await hass.async_block_till_done()
+    assert hass.states.get("sensor.test").state == STATE_UNAVAILABLE
+
+
 async def test_reload(hass, recorder_mock):
     """Verify we can reload history_stats sensors."""
     hass.state = ha.CoreState.not_running
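A note on the pattern used by the new availability tests above (explanatory sketch, not part of the commit): on the coordinator's next one-minute refresh the template error raises UpdateFailed in coordinator.py, which flips the entity from unknown to unavailable, so the tests freeze time, jump a minute ahead, and fire the scheduled update instead of calling async_update_entity.

# Time-advance idiom used by the tests above (repeated here only for emphasis):
# next_update_time = dt_util.utcnow() + timedelta(minutes=1)
# with freeze_time(next_update_time):
#     async_fire_time_changed(hass, next_update_time)
#     await hass.async_block_till_done()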
@@ -334,6 +346,7 @@ async def test_measure_multiple(hass, recorder_mock):
         ]
     },
 )
+    await hass.async_block_till_done()
 
     with patch(
         "homeassistant.components.recorder.history.state_changes_during_period",
@@ -349,7 +362,7 @@ async def test_measure_multiple(hass, recorder_mock):
     assert hass.states.get("sensor.sensor4").state == "50.0"
 
 
-async def async_test_measure(hass, recorder_mock):
+async def test_measure(hass, recorder_mock):
     """Test the history statistics sensor measure."""
     start_time = dt_util.utcnow() - timedelta(minutes=60)
     t0 = start_time + timedelta(minutes=20)
@@ -413,6 +426,7 @@ async def async_test_measure(hass, recorder_mock):
         ]
     },
 )
+    await hass.async_block_till_done()
 
     with patch(
         "homeassistant.components.recorder.history.state_changes_during_period",
@@ -422,10 +436,10 @@ async def async_test_measure(hass, recorder_mock):
             await async_update_entity(hass, f"sensor.sensor{i}")
         await hass.async_block_till_done()
 
-    assert hass.states.get("sensor.sensor1").state == "0.5"
-    assert hass.states.get("sensor.sensor2").state == STATE_UNKNOWN
-    assert hass.states.get("sensor.sensor3").state == "2"
-    assert hass.states.get("sensor.sensor4").state == "50.0"
+    assert hass.states.get("sensor.sensor1").state == "0.83"
+    assert hass.states.get("sensor.sensor2").state == "0.83"
+    assert hass.states.get("sensor.sensor3").state == "1"
+    assert hass.states.get("sensor.sensor4").state == "83.3"
 
 
 async def test_async_on_entire_period(hass, recorder_mock):
@@ -493,6 +507,7 @@ async def test_async_on_entire_period(hass, recorder_mock):
         ]
     },
 )
+    await hass.async_block_till_done()
 
     with patch(
         "homeassistant.components.recorder.history.state_changes_during_period",
@@ -573,6 +588,7 @@ async def test_async_off_entire_period(hass, recorder_mock):
         ]
     },
 )
+    await hass.async_block_till_done()
 
     with patch(
         "homeassistant.components.recorder.history.state_changes_during_period",
@@ -636,6 +652,7 @@ async def test_async_start_from_history_and_switch_to_watching_state_changes_sin
         ]
     },
 )
+    await hass.async_block_till_done()
 
     await async_update_entity(hass, "sensor.sensor1")
     await hass.async_block_till_done()
@@ -678,6 +695,14 @@ async def test_async_start_from_history_and_switch_to_watching_state_changes_sin
 
     assert hass.states.get("sensor.sensor1").state == "1.75"
 
+    # The window has ended, it should not change again
+    after_end_time = start_time + timedelta(minutes=125)
+    with freeze_time(after_end_time):
+        async_fire_time_changed(hass, after_end_time)
+        await hass.async_block_till_done()
+
+    assert hass.states.get("sensor.sensor1").state == "1.75"
+
 
 async def test_async_start_from_history_and_switch_to_watching_state_changes_single_expanding_window(
     hass,
@@ -727,6 +752,7 @@ async def test_async_start_from_history_and_switch_to_watching_state_changes_sin
         ]
     },
 )
+    await hass.async_block_till_done()
 
     await async_update_entity(hass, "sensor.sensor1")
     await hass.async_block_till_done()
@@ -762,6 +788,13 @@ async def test_async_start_from_history_and_switch_to_watching_state_changes_sin
 
     assert hass.states.get("sensor.sensor1").state == "1.5"
 
+    next_update_time = start_time + timedelta(minutes=107)
+    with freeze_time(next_update_time):
+        async_fire_time_changed(hass, next_update_time)
+        await hass.async_block_till_done()
+
+    assert hass.states.get("sensor.sensor1").state == "1.53"
+
     end_time = start_time + timedelta(minutes=120)
     with freeze_time(end_time):
         async_fire_time_changed(hass, end_time)
@@ -845,6 +878,8 @@ async def test_async_start_from_history_and_switch_to_watching_state_changes_mul
         ]
     },
 )
+    await hass.async_block_till_done()
+
     for i in range(1, 5):
         await async_update_entity(hass, f"sensor.sensor{i}")
         await hass.async_block_till_done()
@@ -1044,3 +1079,230 @@ async def test_does_not_work_into_the_future(hass, recorder_mock):
         await hass.async_block_till_done()
 
     assert hass.states.get("sensor.sensor1").state == "0.0"
+
+
+async def test_reload_before_start_event(hass, recorder_mock):
+    """Verify we can reload history_stats sensors before the start event."""
+    hass.state = ha.CoreState.not_running
+    hass.states.async_set("binary_sensor.test_id", "on")
+
+    await async_setup_component(
+        hass,
+        "sensor",
+        {
+            "sensor": {
+                "platform": "history_stats",
+                "entity_id": "binary_sensor.test_id",
+                "name": "test",
+                "state": "on",
+                "start": "{{ as_timestamp(now()) - 3600 }}",
+                "duration": "01:00",
+            },
+        },
+    )
+    await hass.async_block_till_done()
+
+    assert len(hass.states.async_all()) == 2
+
+    assert hass.states.get("sensor.test")
+
+    yaml_path = get_fixture_path("configuration.yaml", "history_stats")
+    with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path):
+        await hass.services.async_call(
+            DOMAIN,
+            SERVICE_RELOAD,
+            {},
+            blocking=True,
+        )
+        await hass.async_block_till_done()
+
+    assert len(hass.states.async_all()) == 2
+
+    assert hass.states.get("sensor.test") is None
+    assert hass.states.get("sensor.second_test")
+
+
+async def test_measure_sliding_window(hass, recorder_mock):
+    """Test the history statistics sensor with a moving end and a moving start."""
+    start_time = dt_util.utcnow() - timedelta(minutes=60)
+    t0 = start_time + timedelta(minutes=20)
+    t1 = t0 + timedelta(minutes=10)
+    t2 = t1 + timedelta(minutes=10)
+
+    # Start     t0        t1        t2        End
+    # |--20min--|--20min--|--10min--|--10min--|
+    # |---off---|---on----|---off---|---on----|
+
+    def _fake_states(*args, **kwargs):
+        return {
+            "binary_sensor.test_id": [
+                ha.State("binary_sensor.test_id", "on", last_changed=t0),
+                ha.State("binary_sensor.test_id", "off", last_changed=t1),
+                ha.State("binary_sensor.test_id", "on", last_changed=t2),
+            ]
+        }
+
+    await async_setup_component(
+        hass,
+        "sensor",
+        {
+            "sensor": [
+                {
+                    "platform": "history_stats",
+                    "entity_id": "binary_sensor.test_id",
+                    "name": "sensor1",
+                    "state": "on",
+                    "start": "{{ as_timestamp(now()) - 3600 }}",
+                    "end": "{{ as_timestamp(now()) + 3600 }}",
+                    "type": "time",
+                },
+                {
+                    "platform": "history_stats",
+                    "entity_id": "binary_sensor.test_id",
+                    "name": "sensor2",
+                    "state": "on",
+                    "start": "{{ as_timestamp(now()) - 3600 }}",
+                    "end": "{{ as_timestamp(now()) + 3600 }}",
+                    "type": "time",
+                },
+                {
+                    "platform": "history_stats",
+                    "entity_id": "binary_sensor.test_id",
+                    "name": "sensor3",
+                    "state": "on",
+                    "start": "{{ as_timestamp(now()) - 3600 }}",
+                    "end": "{{ as_timestamp(now()) + 3600 }}",
+                    "type": "count",
+                },
+                {
+                    "platform": "history_stats",
+                    "entity_id": "binary_sensor.test_id",
+                    "name": "sensor4",
+                    "state": "on",
+                    "start": "{{ as_timestamp(now()) - 3600 }}",
+                    "end": "{{ as_timestamp(now()) + 3600 }}",
+                    "type": "ratio",
+                },
+            ]
+        },
+    )
+    await hass.async_block_till_done()
+
+    with patch(
+        "homeassistant.components.recorder.history.state_changes_during_period",
+        _fake_states,
+    ), freeze_time(start_time):
+        for i in range(1, 5):
+            await async_update_entity(hass, f"sensor.sensor{i}")
+        await hass.async_block_till_done()
+
+    assert hass.states.get("sensor.sensor1").state == "0.83"
+    assert hass.states.get("sensor.sensor2").state == "0.83"
+    assert hass.states.get("sensor.sensor3").state == "1"
+    assert hass.states.get("sensor.sensor4").state == "41.7"
+
+    past_next_update = start_time + timedelta(minutes=30)
+    with patch(
+        "homeassistant.components.recorder.history.state_changes_during_period",
+        _fake_states,
+    ), freeze_time(past_next_update):
+        async_fire_time_changed(hass, past_next_update)
+        await hass.async_block_till_done()
+
+    assert hass.states.get("sensor.sensor1").state == "0.83"
+    assert hass.states.get("sensor.sensor2").state == "0.83"
+    assert hass.states.get("sensor.sensor3").state == "1"
+    assert hass.states.get("sensor.sensor4").state == "41.7"
+
+
+async def test_measure_from_end_going_backwards(hass, recorder_mock):
+    """Test the history statistics sensor with a moving end and a duration to find the start."""
+    start_time = dt_util.utcnow() - timedelta(minutes=60)
+    t0 = start_time + timedelta(minutes=20)
+    t1 = t0 + timedelta(minutes=10)
+    t2 = t1 + timedelta(minutes=10)
+
+    # Start     t0        t1        t2        End
+    # |--20min--|--20min--|--10min--|--10min--|
+    # |---off---|---on----|---off---|---on----|
+
+    def _fake_states(*args, **kwargs):
+        return {
+            "binary_sensor.test_id": [
+                ha.State("binary_sensor.test_id", "on", last_changed=t0),
+                ha.State("binary_sensor.test_id", "off", last_changed=t1),
+                ha.State("binary_sensor.test_id", "on", last_changed=t2),
+            ]
+        }
+
+    await async_setup_component(
+        hass,
+        "sensor",
+        {
+            "sensor": [
+                {
+                    "platform": "history_stats",
+                    "entity_id": "binary_sensor.test_id",
+                    "name": "sensor1",
+                    "state": "on",
+                    "duration": {"hours": 1},
+                    "end": "{{ now() }}",
+                    "type": "time",
+                },
+                {
+                    "platform": "history_stats",
+                    "entity_id": "binary_sensor.test_id",
+                    "name": "sensor2",
+                    "state": "on",
+                    "duration": {"hours": 1},
+                    "end": "{{ now() }}",
+                    "type": "time",
+                },
+                {
+                    "platform": "history_stats",
+                    "entity_id": "binary_sensor.test_id",
+                    "name": "sensor3",
+                    "state": "on",
+                    "duration": {"hours": 1},
+                    "end": "{{ now() }}",
+                    "type": "count",
+                },
+                {
+                    "platform": "history_stats",
+                    "entity_id": "binary_sensor.test_id",
+                    "name": "sensor4",
+                    "state": "on",
+                    "duration": {"hours": 1},
+                    "end": "{{ now() }}",
+                    "type": "ratio",
+                },
+            ]
+        },
+    )
+    await hass.async_block_till_done()
+
+    with patch(
+        "homeassistant.components.recorder.history.state_changes_during_period",
+        _fake_states,
+    ), freeze_time(start_time):
+        for i in range(1, 5):
+            await async_update_entity(hass, f"sensor.sensor{i}")
+        await hass.async_block_till_done()
+
+    assert hass.states.get("sensor.sensor1").state == "0.83"
+    assert hass.states.get("sensor.sensor2").state == "0.83"
+    assert hass.states.get("sensor.sensor3").state == "1"
+    assert hass.states.get("sensor.sensor4").state == "83.3"
+
+    past_next_update = start_time + timedelta(minutes=30)
+    with patch(
+        "homeassistant.components.recorder.history.state_changes_during_period",
+        _fake_states,
+    ), freeze_time(past_next_update):
+        async_fire_time_changed(hass, past_next_update)
+        await hass.async_block_till_done()
+
+    assert hass.states.get("sensor.sensor1").state == "0.83"
+    assert hass.states.get("sensor.sensor2").state == "0.83"
+    assert hass.states.get("sensor.sensor3").state == "1"
+    assert hass.states.get("sensor.sensor4").state == "83.3"