mirror of
https://github.com/home-assistant/core.git
synced 2025-07-21 20:27:08 +00:00
Remove support for legacy logbook events created before 0.112 (#37822)
* Remove support for legacy logbook events created before 0.112. Reduce the complexity of the logbook code. This should also have a small performance boost. * None is the default
This commit is contained in:
parent
261f0b971c
commit
9db6318122
@ -215,15 +215,13 @@ class LogbookView(HomeAssistantView):
|
||||
return await hass.async_add_job(json_events)
|
||||
|
||||
|
||||
def humanify(hass, events, entity_attr_cache, prev_states=None):
|
||||
def humanify(hass, events, entity_attr_cache):
|
||||
"""Generate a converted list of events into Entry objects.
|
||||
|
||||
Will try to group events if possible:
|
||||
- if 2+ sensor updates in GROUP_BY_MINUTES, show last
|
||||
- if Home Assistant stop and start happen in same minute call it restarted
|
||||
"""
|
||||
if prev_states is None:
|
||||
prev_states = {}
|
||||
|
||||
# Group events in batches of GROUP_BY_MINUTES
|
||||
for _, g_events in groupby(
|
||||
@ -270,12 +268,6 @@ def humanify(hass, events, entity_attr_cache, prev_states=None):
|
||||
|
||||
if event.event_type == EVENT_STATE_CHANGED:
|
||||
entity_id = event.entity_id
|
||||
|
||||
# Skip events that have not changed state
|
||||
if entity_id in prev_states and prev_states[entity_id] == event.state:
|
||||
continue
|
||||
|
||||
prev_states[entity_id] = event.state
|
||||
domain = event.domain
|
||||
|
||||
if (
|
||||
@ -385,16 +377,10 @@ def _get_events(
|
||||
.outerjoin(old_state, (States.old_state_id == old_state.state_id))
|
||||
# The below filter, removes state change events that do not have
|
||||
# an old_state, new_state, or the old and
|
||||
# new state are the same for v8 schema or later.
|
||||
# new state.
|
||||
#
|
||||
# If the events/states were stored before v8 schema, we rely on the
|
||||
# prev_states dict to remove them.
|
||||
#
|
||||
# When all data is schema v8 or later, the check for EMPTY_JSON_OBJECT
|
||||
# can be removed.
|
||||
.filter(
|
||||
(Events.event_type != EVENT_STATE_CHANGED)
|
||||
| (Events.event_data != EMPTY_JSON_OBJECT)
|
||||
| (
|
||||
(States.state_id.isnot(None))
|
||||
& (old_state.state_id.isnot(None))
|
||||
@ -438,18 +424,12 @@ def _get_events(
|
||||
entity_filter | (Events.event_type != EVENT_STATE_CHANGED)
|
||||
)
|
||||
|
||||
# When all data is schema v8 or later, prev_states can be removed
|
||||
prev_states = {}
|
||||
return list(humanify(hass, yield_events(query), entity_attr_cache, prev_states))
|
||||
return list(humanify(hass, yield_events(query), entity_attr_cache))
|
||||
|
||||
|
||||
def _keep_event(hass, event, entities_filter):
|
||||
if event.event_type == EVENT_STATE_CHANGED:
|
||||
entity_id = event.entity_id
|
||||
# Do not report on new entities
|
||||
# Do not report on entity removal
|
||||
if not event.has_old_and_new_state:
|
||||
return False
|
||||
elif event.event_type in HOMEASSISTANT_EVENTS:
|
||||
entity_id = f"{HA_DOMAIN}."
|
||||
elif event.event_type in hass.data[DOMAIN] and ATTR_ENTITY_ID not in event.data:
|
||||
@ -640,25 +620,6 @@ class LazyEventPartialState:
|
||||
)
|
||||
return self._time_fired_isoformat
|
||||
|
||||
@property
|
||||
def has_old_and_new_state(self):
|
||||
"""Check the json data to see if new_state and old_state are present without decoding."""
|
||||
# Delete this check once all states are saved in the v8 schema
|
||||
# format or later (they have the old_state_id column).
|
||||
|
||||
# New events in v8+ schema format
|
||||
if self._row.event_data == EMPTY_JSON_OBJECT:
|
||||
# Events are already pre-filtered in sql
|
||||
# to exclude missing old and new state
|
||||
# if they are in v8+ format
|
||||
return True
|
||||
|
||||
# Old events not in v8 schema format
|
||||
return (
|
||||
'"old_state": {' in self._row.event_data
|
||||
and '"new_state": {' in self._row.event_data
|
||||
)
|
||||
|
||||
|
||||
class EntityAttributeCache:
|
||||
"""A cache to lookup static entity_id attribute.
|
||||
@ -684,9 +645,7 @@ class EntityAttributeCache:
|
||||
if current_state:
|
||||
# Try the current state as it's faster than decoding the
|
||||
# attributes
|
||||
self._cache[entity_id][attribute] = current_state.attributes.get(
|
||||
attribute, None
|
||||
)
|
||||
self._cache[entity_id][attribute] = current_state.attributes.get(attribute)
|
||||
else:
|
||||
# If the entity has been removed, decode the attributes
|
||||
# instead
|
||||
|
@ -572,43 +572,6 @@ class TestComponentLogbook(unittest.TestCase):
|
||||
entries[5], pointC, "included", domain="light", entity_id=entity_id4
|
||||
)
|
||||
|
||||
def test_exclude_attribute_changes(self):
|
||||
"""Test if events of attribute changes are filtered."""
|
||||
pointA = dt_util.utcnow()
|
||||
pointB = pointA + timedelta(minutes=1)
|
||||
pointC = pointB + timedelta(minutes=1)
|
||||
entity_attr_cache = logbook.EntityAttributeCache(self.hass)
|
||||
|
||||
state_off = ha.State("light.kitchen", "off", {}, pointA, pointA).as_dict()
|
||||
state_100 = ha.State(
|
||||
"light.kitchen", "on", {"brightness": 100}, pointB, pointB
|
||||
).as_dict()
|
||||
state_200 = ha.State(
|
||||
"light.kitchen", "on", {"brightness": 200}, pointB, pointC
|
||||
).as_dict()
|
||||
|
||||
eventA = self.create_state_changed_event_from_old_new(
|
||||
"light.kitchen", pointB, state_off, state_100
|
||||
)
|
||||
eventB = self.create_state_changed_event_from_old_new(
|
||||
"light.kitchen", pointC, state_100, state_200
|
||||
)
|
||||
|
||||
entities_filter = convert_include_exclude_filter(
|
||||
logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})[logbook.DOMAIN]
|
||||
)
|
||||
events = [
|
||||
e
|
||||
for e in (eventA, eventB)
|
||||
if logbook._keep_event(self.hass, e, entities_filter)
|
||||
]
|
||||
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
|
||||
|
||||
assert len(entries) == 1
|
||||
self.assert_entry(
|
||||
entries[0], pointB, "kitchen", domain="light", entity_id="light.kitchen"
|
||||
)
|
||||
|
||||
def test_home_assistant_start_stop_grouped(self):
|
||||
"""Test if HA start and stop events are grouped.
|
||||
|
||||
@ -1835,6 +1798,42 @@ async def test_exclude_removed_entities(hass, hass_client):
|
||||
assert response_json[2]["entity_id"] == entity_id2
|
||||
|
||||
|
||||
async def test_exclude_attribute_changes(hass, hass_client):
|
||||
"""Test if events of attribute changes are filtered."""
|
||||
await hass.async_add_executor_job(init_recorder_component, hass)
|
||||
await async_setup_component(hass, "logbook", {})
|
||||
await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
|
||||
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
|
||||
hass.states.async_set("light.kitchen", STATE_OFF)
|
||||
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 100})
|
||||
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 200})
|
||||
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 300})
|
||||
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 400})
|
||||
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await hass.async_add_job(partial(trigger_db_commit, hass))
|
||||
await hass.async_block_till_done()
|
||||
|
||||
client = await hass_client()
|
||||
|
||||
# Today time 00:00:00
|
||||
start = dt_util.utcnow().date()
|
||||
start_date = datetime(start.year, start.month, start.day)
|
||||
|
||||
# Test today entries without filters
|
||||
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
|
||||
assert response.status == 200
|
||||
response_json = await response.json()
|
||||
|
||||
assert len(response_json) == 2
|
||||
assert response_json[0]["domain"] == "homeassistant"
|
||||
assert response_json[1]["message"] == "turned on"
|
||||
assert response_json[1]["entity_id"] == "light.kitchen"
|
||||
|
||||
|
||||
class MockLazyEventPartialState(ha.Event):
|
||||
"""Minimal mock of a Lazy event."""
|
||||
|
||||
|
Loading…
x
Reference in New Issue
Block a user