Add last_reported to state reported event data (#148932)

Erik Montnemery 2025-07-18 13:53:30 +02:00 committed by GitHub
parent 277241c4d3
commit 1743766d17
4 changed files with 109 additions and 49 deletions

homeassistant/components/derivative/sensor.py

@@ -320,7 +320,12 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
# changed state, then we know it will still be zero.
return
schedule_max_sub_interval_exceeded(new_state)
calc_derivative(new_state, new_state.state, event.data["old_last_reported"])
calc_derivative(
new_state,
new_state.state,
event.data["last_reported"],
event.data["old_last_reported"],
)
@callback
def on_state_changed(event: Event[EventStateChangedData]) -> None:
@@ -334,19 +339,27 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
schedule_max_sub_interval_exceeded(new_state)
old_state = event.data["old_state"]
if old_state is not None:
calc_derivative(new_state, old_state.state, old_state.last_reported)
calc_derivative(
new_state,
old_state.state,
new_state.last_updated,
old_state.last_reported,
)
else:
# On first state change from none, update availability
self.async_write_ha_state()
def calc_derivative(
new_state: State, old_value: str, old_last_reported: datetime
new_state: State,
old_value: str,
new_timestamp: datetime,
old_timestamp: datetime,
) -> None:
"""Handle the sensor state changes."""
if not _is_decimal_state(old_value):
if self._last_valid_state_time:
old_value = self._last_valid_state_time[0]
old_last_reported = self._last_valid_state_time[1]
old_timestamp = self._last_valid_state_time[1]
else:
# Sensor becomes valid for the first time, just keep the restored value
self.async_write_ha_state()
@@ -358,12 +371,10 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
"" if unit is None else unit
)
self._prune_state_list(new_state.last_reported)
self._prune_state_list(new_timestamp)
try:
elapsed_time = (
new_state.last_reported - old_last_reported
).total_seconds()
elapsed_time = (new_timestamp - old_timestamp).total_seconds()
delta_value = Decimal(new_state.state) - Decimal(old_value)
new_derivative = (
delta_value
@@ -392,12 +403,10 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
return
# add latest derivative to the window list
self._state_list.append(
(old_last_reported, new_state.last_reported, new_derivative)
)
self._state_list.append((old_timestamp, new_timestamp, new_derivative))
self._last_valid_state_time = (
new_state.state,
new_state.last_reported,
new_timestamp,
)
# If outside of time window just report derivative (is the same as modeling it in the window),
@@ -405,9 +414,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
if elapsed_time > self._time_window:
derivative = new_derivative
else:
derivative = self._calc_derivative_from_state_list(
new_state.last_reported
)
derivative = self._calc_derivative_from_state_list(new_timestamp)
self._write_native_value(derivative)
source_state = self.hass.states.get(self._sensor_source_id)
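The same timestamp-selection pattern applies to any integration built on these two listeners: on a state change the State objects carry usable timestamps, while on a state report both timestamps come from the event payload. A minimal sketch of that split, where _recalculate() is a hypothetical stand-in for calc_derivative() above and is not part of this commit:

from datetime import datetime

from homeassistant.core import (
    Event,
    EventStateChangedData,
    EventStateReportedData,
    State,
    callback,
)


def _recalculate(new_state: State, new_ts: datetime, old_ts: datetime) -> None:
    """Hypothetical stand-in for calc_derivative(); not part of the commit."""


@callback
def on_state_changed(event: Event[EventStateChangedData]) -> None:
    new_state = event.data["new_state"]
    old_state = event.data["old_state"]
    if new_state is None or old_state is None:
        return
    # State change: new_state.last_updated is stable and old_state.last_reported
    # is not mutated afterwards, so both can be read from the State objects.
    _recalculate(new_state, new_state.last_updated, old_state.last_reported)


@callback
def on_state_reported(event: Event[EventStateReportedData]) -> None:
    # State report: the stored State object may be mutated by a later report, so
    # both timestamps come from the event data (last_reported is new in this commit).
    _recalculate(
        event.data["new_state"],
        event.data["last_reported"],
        event.data["old_last_reported"],
    )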

homeassistant/components/integration/sensor.py

@@ -463,7 +463,7 @@ class IntegrationSensor(RestoreSensor):
) -> None:
"""Handle sensor state update when sub interval is configured."""
self._integrate_on_state_update_with_max_sub_interval(
None, event.data["old_state"], event.data["new_state"]
None, None, event.data["old_state"], event.data["new_state"]
)
@callback
@@ -472,13 +472,17 @@ class IntegrationSensor(RestoreSensor):
) -> None:
"""Handle sensor state report when sub interval is configured."""
self._integrate_on_state_update_with_max_sub_interval(
event.data["old_last_reported"], None, event.data["new_state"]
event.data["old_last_reported"],
event.data["last_reported"],
None,
event.data["new_state"],
)
@callback
def _integrate_on_state_update_with_max_sub_interval(
self,
old_last_reported: datetime | None,
old_timestamp: datetime | None,
new_timestamp: datetime | None,
old_state: State | None,
new_state: State | None,
) -> None:
@@ -489,7 +493,9 @@ class IntegrationSensor(RestoreSensor):
"""
self._cancel_max_sub_interval_exceeded_callback()
try:
self._integrate_on_state_change(old_last_reported, old_state, new_state)
self._integrate_on_state_change(
old_timestamp, new_timestamp, old_state, new_state
)
self._last_integration_trigger = _IntegrationTrigger.StateEvent
self._last_integration_time = datetime.now(tz=UTC)
finally:
@@ -503,7 +509,7 @@ class IntegrationSensor(RestoreSensor):
) -> None:
"""Handle sensor state change."""
return self._integrate_on_state_change(
None, event.data["old_state"], event.data["new_state"]
None, None, event.data["old_state"], event.data["new_state"]
)
@callback
@@ -512,12 +518,16 @@ class IntegrationSensor(RestoreSensor):
) -> None:
"""Handle sensor state report."""
return self._integrate_on_state_change(
event.data["old_last_reported"], None, event.data["new_state"]
event.data["old_last_reported"],
event.data["last_reported"],
None,
event.data["new_state"],
)
def _integrate_on_state_change(
self,
old_last_reported: datetime | None,
old_timestamp: datetime | None,
new_timestamp: datetime | None,
old_state: State | None,
new_state: State | None,
) -> None:
@@ -531,16 +541,17 @@ class IntegrationSensor(RestoreSensor):
if old_state:
# state has changed, we recover old_state from the event
new_timestamp = new_state.last_updated
old_state_state = old_state.state
old_last_reported = old_state.last_reported
old_timestamp = old_state.last_reported
else:
# event state reported without any state change
# first state or event state reported without any state change
old_state_state = new_state.state
self._attr_available = True
self._derive_and_set_attributes_from_state(new_state)
if old_last_reported is None and old_state is None:
if old_timestamp is None and old_state is None:
self.async_write_ha_state()
return
@@ -551,11 +562,12 @@ class IntegrationSensor(RestoreSensor):
return
if TYPE_CHECKING:
assert old_last_reported is not None
assert new_timestamp is not None
assert old_timestamp is not None
elapsed_seconds = Decimal(
(new_state.last_reported - old_last_reported).total_seconds()
(new_timestamp - old_timestamp).total_seconds()
if self._last_integration_trigger == _IntegrationTrigger.StateEvent
else (new_state.last_reported - self._last_integration_time).total_seconds()
else (new_timestamp - self._last_integration_time).total_seconds()
)
area = self._method.calculate_area_with_two_states(elapsed_seconds, *states)
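For reference, the elapsed-time arithmetic above reduces to a timestamp difference converted to Decimal. A self-contained sketch with illustrative values; in IntegrationSensor the two datetimes are new_timestamp and old_timestamp taken from the state_changed / state_reported event data:

from datetime import datetime, timezone
from decimal import Decimal

# Illustrative report times standing in for the event-provided timestamps.
old_timestamp = datetime(2025, 7, 18, 11, 0, 0, tzinfo=timezone.utc)
new_timestamp = datetime(2025, 7, 18, 11, 0, 30, tzinfo=timezone.utc)

elapsed_seconds = Decimal((new_timestamp - old_timestamp).total_seconds())
print(elapsed_seconds)  # 30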

homeassistant/components/statistics/sensor.py

@@ -727,12 +727,11 @@ class StatisticsSensor(SensorEntity):
def _async_handle_new_state(
self,
reported_state: State | None,
reported_state: State,
timestamp: float,
) -> None:
"""Handle the sensor state changes."""
if (new_state := reported_state) is None:
return
self._add_state_to_queue(new_state)
self._add_state_to_queue(reported_state, timestamp)
self._async_purge_update_and_schedule()
if self._preview_callback:
@@ -747,14 +746,18 @@ class StatisticsSensor(SensorEntity):
self,
event: Event[EventStateChangedData],
) -> None:
self._async_handle_new_state(event.data["new_state"])
if (new_state := event.data["new_state"]) is None:
return
self._async_handle_new_state(new_state, new_state.last_updated_timestamp)
@callback
def _async_stats_sensor_state_report_listener(
self,
event: Event[EventStateReportedData],
) -> None:
self._async_handle_new_state(event.data["new_state"])
self._async_handle_new_state(
event.data["new_state"], event.data["last_reported"].timestamp()
)
async def _async_stats_sensor_startup(self) -> None:
"""Add listener and get recorded state.
@@ -785,7 +788,9 @@ class StatisticsSensor(SensorEntity):
"""Register callbacks."""
await self._async_stats_sensor_startup()
def _add_state_to_queue(self, new_state: State) -> None:
def _add_state_to_queue(
self, new_state: State, last_reported_timestamp: float
) -> None:
"""Add the state to the queue."""
# Attention: it is not safe to store the new_state object,
@@ -805,7 +810,7 @@ class StatisticsSensor(SensorEntity):
self.states.append(new_state.state == "on")
else:
self.states.append(float(new_state.state))
self.ages.append(new_state.last_reported_timestamp)
self.ages.append(last_reported_timestamp)
self._attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = True
except ValueError:
self._attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = False
@@ -1062,7 +1067,7 @@ class StatisticsSensor(SensorEntity):
self._fetch_states_from_database
):
for state in reversed(states):
self._add_state_to_queue(state)
self._add_state_to_queue(state, state.last_reported_timestamp)
self._calculate_state_attributes(state)
self._async_purge_update_and_schedule()
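The statistics sensor keeps float timestamps in its ages deque, so the caller converts the datetime from the report event with .timestamp(); state-changed events use last_updated_timestamp instead. A standard-library-only sketch of that conversion (the datetime value and the deque size are illustrative):

from collections import deque
from datetime import datetime, timezone

ages: deque[float] = deque(maxlen=20)  # arbitrary size for the sketch

# In _async_stats_sensor_state_report_listener this value is
# event.data["last_reported"]; here it is an illustrative constant.
last_reported = datetime(2025, 7, 18, 11, 53, 30, tzinfo=timezone.utc)
ages.append(last_reported.timestamp())

print(ages[-1])  # 1752839610.0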

homeassistant/core.py

@@ -157,7 +157,6 @@ class EventStateEventData(TypedDict):
"""Base class for EVENT_STATE_CHANGED and EVENT_STATE_REPORTED data."""
entity_id: str
new_state: State | None
class EventStateChangedData(EventStateEventData):
@@ -166,6 +165,7 @@ class EventStateChangedData(EventStateEventData):
A state changed event is fired when on state write the state is changed.
"""
new_state: State | None
old_state: State | None
@@ -175,6 +175,8 @@ class EventStateReportedData(EventStateEventData):
A state reported event is fired when on state write the state is unchanged.
"""
last_reported: datetime.datetime
new_state: State
old_last_reported: datetime.datetime
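A consumer of the state reported event now receives the report time directly in the payload. A minimal sketch of a handler reading the fields defined by EventStateReportedData (the handler name is hypothetical):

from homeassistant.core import Event, EventStateReportedData, callback


@callback
def handle_state_report(event: Event[EventStateReportedData]) -> None:
    """Hypothetical handler showing the EventStateReportedData fields."""
    entity_id = event.data["entity_id"]
    new_state = event.data["new_state"]  # State, never None for reports
    reported_at = event.data["last_reported"]  # added by this commit
    previous_at = event.data["old_last_reported"]  # time of the previous report

As the State docstring below notes, last_reported on the State object itself may be mutated by a later report, which is why the event carries its own copy of the timestamp.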
@@ -1749,18 +1751,38 @@ class CompressedState(TypedDict):
class State:
"""Object to represent a state within the state machine.
"""Object to represent a state within the state machine."""
entity_id: the entity that is represented.
state: the state of the entity
attributes: extra information on entity and state
last_changed: last time the state was changed.
last_reported: last time the state was reported.
last_updated: last time the state or attributes were changed.
context: Context in which it was created
domain: Domain of this state.
object_id: Object id of this state.
entity_id: str
"""The entity that is represented by the state."""
domain: str
"""Domain of the entity that is represented by the state."""
object_id: str
"""object_id: Object id of this state."""
state: str
"""The state of the entity."""
attributes: ReadOnlyDict[str, Any]
"""Extra information on entity and state"""
last_changed: datetime.datetime
"""Last time the state was changed."""
last_reported: datetime.datetime
"""Last time the state was reported.
Note: When the state is set and neither the state nor attributes are
changed, the existing state will be mutated with an updated last_reported.
When handling a state change event, the last_reported attribute of the old
state will not be modified and can safely be used. The last_reported attribute
of the new state may be modified and the last_updated attribute should be used
instead.
When handling a state report event, the last_reported attribute may be
modified and last_reported from the event data should be used instead.
"""
last_updated: datetime.datetime
"""Last time the state or attributes were changed."""
context: Context
"""Context in which the state was created."""
__slots__ = (
"_cache",
@@ -1841,7 +1863,20 @@ class State:
@under_cached_property
def last_reported_timestamp(self) -> float:
"""Timestamp of last report."""
"""Timestamp of last report.
Note: When the state is set and neither the state nor attributes are
changed, the existing state will be mutated with an updated last_reported.
When handling a state change event, the last_reported_timestamp attribute
of the old state will not be modified and can safely be used. The
last_reported_timestamp attribute of the new state may be modified and the
last_updated_timestamp attribute should be used instead.
When handling a state report event, the last_reported_timestamp attribute may
be modified and last_reported from the event data should be used instead.
"""
return self.last_reported.timestamp()
@under_cached_property
@@ -2340,6 +2375,7 @@ class StateMachine:
EVENT_STATE_REPORTED,
{
"entity_id": entity_id,
"last_reported": now,
"old_last_reported": old_last_reported,
"new_state": old_state,
},
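
To close the loop, a listener can be wired to these events through the state-report tracking helper. The sketch below assumes homeassistant.helpers.event.async_track_state_report_event with a (hass, entity_ids, action) signature and uses a placeholder entity id; neither the setup function nor the entity is part of this commit.

import logging

from homeassistant.core import Event, EventStateReportedData, HomeAssistant, callback
from homeassistant.helpers.event import async_track_state_report_event

_LOGGER = logging.getLogger(__name__)


def setup_report_listener(hass: HomeAssistant) -> None:
    """Hypothetical setup function; sensor.source is a placeholder entity id."""

    @callback
    def on_report(event: Event[EventStateReportedData]) -> None:
        # "last_reported" is the field added in the StateMachine code above;
        # "old_last_reported" was already part of the event data.
        interval = event.data["last_reported"] - event.data["old_last_reported"]
        _LOGGER.debug("%s reported again after %s", event.data["entity_id"], interval)

    # The helper returns an unsubscribe callable; real code would keep it and
    # call it when the listener is no longer needed.
    unsub = async_track_state_report_event(hass, ["sensor.source"], on_report)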