Refactor logbook to reduce overhead and complexity (#71509)

J. Nick Koston 2022-05-08 23:47:26 -05:00 committed by GitHub
parent 15a5878a39
commit 24d7a464e1
12 changed files with 573 additions and 680 deletions


@ -326,9 +326,10 @@ class LogbookView(HomeAssistantView):
def humanify(
hass: HomeAssistant,
events: Generator[LazyEventPartialState, None, None],
entity_attr_cache: EntityAttributeCache,
context_lookup: dict[str | None, LazyEventPartialState | None],
rows: Generator[Row, None, None],
entity_name_cache: EntityNameCache,
event_cache: EventCache,
context_augmenter: ContextAugmenter,
) -> Generator[dict[str, Any], None, None]:
"""Generate a converted list of events into Entry objects.
@ -336,25 +337,24 @@ def humanify(
- if Home Assistant stop and start happen in same minute call it restarted
"""
external_events = hass.data.get(DOMAIN, {})
# Group events in batches of GROUP_BY_MINUTES
for _, g_events in groupby(
events, lambda event: event.time_fired_minute // GROUP_BY_MINUTES
):
events_batch = list(g_events)
# Continuous sensors, will be excluded from the logbook
continuous_sensors = {}
# Group events in batches of GROUP_BY_MINUTES
for _, g_rows in groupby(
rows, lambda row: row.time_fired.minute // GROUP_BY_MINUTES # type: ignore[no-any-return]
):
rows_batch = list(g_rows)
# Group HA start/stop events
# Maps minute of event to 1: stop, 2: stop + start
start_stop_events = {}
# Process events
for event in events_batch:
if event.event_type == EVENT_STATE_CHANGED:
entity_id = event.entity_id
for row in rows_batch:
if row.event_type == EVENT_STATE_CHANGED:
entity_id = row.entity_id
if (
entity_id in continuous_sensors
or split_entity_id(entity_id)[0] != SENSOR_DOMAIN
@ -363,22 +363,22 @@ def humanify(
assert entity_id is not None
continuous_sensors[entity_id] = _is_sensor_continuous(hass, entity_id)
elif event.event_type == EVENT_HOMEASSISTANT_STOP:
if event.time_fired_minute in start_stop_events:
elif row.event_type == EVENT_HOMEASSISTANT_STOP:
if row.time_fired.minute in start_stop_events:
continue
start_stop_events[event.time_fired_minute] = 1
start_stop_events[row.time_fired.minute] = 1
elif event.event_type == EVENT_HOMEASSISTANT_START:
if event.time_fired_minute not in start_stop_events:
elif row.event_type == EVENT_HOMEASSISTANT_START:
if row.time_fired.minute not in start_stop_events:
continue
start_stop_events[event.time_fired_minute] = 2
start_stop_events[row.time_fired.minute] = 2
# Yield entries
for event in events_batch:
if event.event_type == EVENT_STATE_CHANGED:
entity_id = event.entity_id
for row in rows_batch:
if row.event_type == EVENT_STATE_CHANGED:
entity_id = row.entity_id
assert entity_id is not None
if continuous_sensors.get(entity_id):
@ -386,74 +386,59 @@ def humanify(
continue
data = {
"when": event.time_fired_isoformat,
"name": _entity_name_from_event(
entity_id, event, entity_attr_cache
),
"state": event.state,
"when": _row_time_fired_isoformat(row),
"name": entity_name_cache.get(entity_id, row),
"state": row.state,
"entity_id": entity_id,
}
if icon := event.attributes_icon:
if icon := _row_attributes_extract(row, ICON_JSON_EXTRACT):
data["icon"] = icon
if event.context_user_id:
data["context_user_id"] = event.context_user_id
if row.context_user_id:
data["context_user_id"] = row.context_user_id
_augment_data_with_context(
data,
entity_id,
event,
context_lookup,
entity_attr_cache,
external_events,
)
context_augmenter.augment(data, entity_id, row)
yield data
elif event.event_type in external_events:
domain, describe_event = external_events[event.event_type]
data = describe_event(event)
data["when"] = event.time_fired_isoformat
elif row.event_type in external_events:
domain, describe_event = external_events[row.event_type]
data = describe_event(event_cache.get(row))
data["when"] = _row_time_fired_isoformat(row)
data["domain"] = domain
if event.context_user_id:
data["context_user_id"] = event.context_user_id
if row.context_user_id:
data["context_user_id"] = row.context_user_id
_augment_data_with_context(
data,
data.get(ATTR_ENTITY_ID),
event,
context_lookup,
entity_attr_cache,
external_events,
)
entity_id = data.get(ATTR_ENTITY_ID)
context_augmenter.augment(data, entity_id, row)
yield data
elif event.event_type == EVENT_HOMEASSISTANT_START:
if start_stop_events.get(event.time_fired_minute) == 2:
elif row.event_type == EVENT_HOMEASSISTANT_START:
if start_stop_events.get(row.time_fired.minute) == 2:
continue
yield {
"when": event.time_fired_isoformat,
"when": _row_time_fired_isoformat(row),
"name": "Home Assistant",
"message": "started",
"domain": HA_DOMAIN,
}
elif event.event_type == EVENT_HOMEASSISTANT_STOP:
if start_stop_events.get(event.time_fired_minute) == 2:
elif row.event_type == EVENT_HOMEASSISTANT_STOP:
if start_stop_events.get(row.time_fired.minute) == 2:
action = "restarted"
else:
action = "stopped"
yield {
"when": event.time_fired_isoformat,
"when": _row_time_fired_isoformat(row),
"name": "Home Assistant",
"message": action,
"domain": HA_DOMAIN,
}
elif event.event_type == EVENT_LOGBOOK_ENTRY:
elif row.event_type == EVENT_LOGBOOK_ENTRY:
event = event_cache.get(row)
event_data = event.data
domain = event_data.get(ATTR_DOMAIN)
entity_id = event_data.get(ATTR_ENTITY_ID)
@ -462,25 +447,17 @@ def humanify(
domain = split_entity_id(str(entity_id))[0]
data = {
"when": event.time_fired_isoformat,
"when": _row_time_fired_isoformat(row),
"name": event_data.get(ATTR_NAME),
"message": event_data.get(ATTR_MESSAGE),
"domain": domain,
"entity_id": entity_id,
}
if event.context_user_id:
data["context_user_id"] = event.context_user_id
_augment_data_with_context(
data,
entity_id,
event,
context_lookup,
entity_attr_cache,
external_events,
)
if row.context_user_id:
data["context_user_id"] = row.context_user_id
context_augmenter.augment(data, entity_id, row)
yield data
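
The batching at the top of humanify relies on itertools.groupby over a minute bucket: because the query hands rows over ordered by time_fired, consecutive rows whose minute falls into the same GROUP_BY_MINUTES window land in one rows_batch. A minimal, self-contained sketch of that grouping (the GROUP_BY_MINUTES value and the row dicts are illustrative assumptions, not taken from this diff):

from datetime import datetime
from itertools import groupby

GROUP_BY_MINUTES = 15  # assumed value, for illustration only

rows = [
    {"event_type": "demo", "time_fired": datetime(2022, 5, 8, 12, minute)}
    for minute in (0, 3, 14, 16, 31)
]

# groupby only merges *consecutive* rows, which is enough here because the
# query orders rows by time_fired before they reach humanify.
for _, g_rows in groupby(
    rows, lambda row: row["time_fired"].minute // GROUP_BY_MINUTES
):
    rows_batch = list(g_rows)
    print([row["time_fired"].minute for row in rows_batch])
# prints [0, 3, 14], then [16], then [31]
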
@ -499,21 +476,24 @@ def _get_events(
entity_ids and context_id
), "can't pass in both entity_ids and context_id"
entity_attr_cache = EntityAttributeCache(hass)
entity_name_cache = EntityNameCache(hass)
event_data_cache: dict[str, dict[str, Any]] = {}
context_lookup: dict[str | None, LazyEventPartialState | None] = {None: None}
context_lookup: dict[str | None, Row | None] = {None: None}
event_cache = EventCache(event_data_cache)
external_events = hass.data.get(DOMAIN, {})
context_augmenter = ContextAugmenter(
context_lookup, entity_name_cache, external_events, event_cache
)
def yield_events(query: Query) -> Generator[LazyEventPartialState, None, None]:
def yield_rows(query: Query) -> Generator[Row, None, None]:
"""Yield Events that are not filtered away."""
for row in query.yield_per(1000):
event = LazyEventPartialState(row, event_data_cache)
context_lookup.setdefault(event.context_id, event)
if event.event_type == EVENT_CALL_SERVICE:
continue
if event.event_type == EVENT_STATE_CHANGED or _keep_event(
hass, event, entities_filter
context_lookup.setdefault(row.context_id, row)
if row.event_type != EVENT_CALL_SERVICE and (
row.event_type == EVENT_STATE_CHANGED
or _keep_row(hass, row, entities_filter)
):
yield event
yield row
if entity_ids is not None:
entities_filter = generate_filter([], entity_ids, [], [])
@ -568,7 +548,13 @@ def _get_events(
query = query.order_by(Events.time_fired)
return list(
humanify(hass, yield_events(query), entity_attr_cache, context_lookup)
humanify(
hass,
yield_rows(query),
entity_name_cache,
event_cache,
context_augmenter,
)
)
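
yield_rows records every row in context_lookup with setdefault before any filtering, so the first row seen for a given context_id is the one the ContextAugmenter later treats as the origin of that context. A small sketch of that behavior (FakeRow is a stand-in for the SQLAlchemy Row, not part of this change):

class FakeRow:
    """Stand-in for a SQLAlchemy Row with just the fields used here."""

    def __init__(self, context_id, event_type):
        self.context_id = context_id
        self.event_type = event_type


context_lookup = {None: None}
rows = [
    FakeRow("ctx-1", "automation_triggered"),
    FakeRow("ctx-1", "state_changed"),
]
for row in rows:
    # Only the first row per context_id is kept; later rows that share the
    # context can be traced back to it when augmenting logbook entries.
    context_lookup.setdefault(row.context_id, row)

assert context_lookup["ctx-1"].event_type == "automation_triggered"
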
@ -716,105 +702,110 @@ def _apply_event_entity_id_matchers(
return events_query.filter(sqlalchemy.or_(*ors))
def _keep_event(
def _keep_row(
hass: HomeAssistant,
event: LazyEventPartialState,
row: Row,
entities_filter: EntityFilter | Callable[[str], bool] | None = None,
) -> bool:
if event.event_type in HOMEASSISTANT_EVENTS:
event_type = row.event_type
if event_type in HOMEASSISTANT_EVENTS:
return entities_filter is None or entities_filter(HA_DOMAIN_ENTITY_ID)
if entity_id := event.data_entity_id:
if entity_id := _row_event_data_extract(row, ENTITY_ID_JSON_EXTRACT):
return entities_filter is None or entities_filter(entity_id)
if event.event_type in hass.data[DOMAIN]:
if event_type in hass.data[DOMAIN]:
# If the entity_id isn't described, use the domain that describes
# the event for filtering.
domain = hass.data[DOMAIN][event.event_type][0]
domain = hass.data[DOMAIN][event_type][0]
else:
domain = event.data_domain
domain = _row_event_data_extract(row, DOMAIN_JSON_EXTRACT)
return domain is not None and (
entities_filter is None or entities_filter(f"{domain}._")
)
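
When _keep_row cannot resolve an entity_id it falls back to a synthetic "<domain>._" entity id, so a domain-level include/exclude filter still applies. A rough sketch of that fallback with a fake filter (the filter shape here is an assumption for illustration, not the real EntityFilter):

def entities_filter(entity_id):
    """Fake include filter: keep everything in the light domain."""
    return entity_id.startswith("light.")


def keep(domain, entity_id=None):
    # Prefer the concrete entity_id; otherwise filter on the domain alone.
    if entity_id is not None:
        return entities_filter(entity_id)
    return domain is not None and entities_filter(f"{domain}._")


print(keep("light", "light.kitchen"))  # True
print(keep("switch"))                  # False
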
def _augment_data_with_context(
data: dict[str, Any],
entity_id: str | None,
event: LazyEventPartialState,
context_lookup: dict[str | None, LazyEventPartialState | None],
entity_attr_cache: EntityAttributeCache,
class ContextAugmenter:
"""Augment data with context trace."""
def __init__(
self,
context_lookup: dict[str | None, Row | None],
entity_name_cache: EntityNameCache,
external_events: dict[
str, tuple[str, Callable[[LazyEventPartialState], dict[str, Any]]]
],
) -> None:
if not (context_event := context_lookup.get(event.context_id)):
event_cache: EventCache,
) -> None:
"""Init the augmenter."""
self.context_lookup = context_lookup
self.entity_name_cache = entity_name_cache
self.external_events = external_events
self.event_cache = event_cache
def augment(self, data: dict[str, Any], entity_id: str | None, row: Row) -> None:
"""Augment data from the row and cache."""
if not (context_row := self.context_lookup.get(row.context_id)):
return
if event == context_event:
if _rows_match(row, context_row):
# This is the first event with the given ID. Was it directly caused by
# a parent event?
if event.context_parent_id:
context_event = context_lookup.get(event.context_parent_id)
if (
not row.context_parent_id
or (context_row := self.context_lookup.get(row.context_parent_id))
is None
):
return
# Ensure the (parent) context_event exists and is not the root cause of
# this log entry.
if not context_event or event == context_event:
if _rows_match(row, context_row):
return
event_type = context_event.event_type
event_type = context_row.event_type
# State change
if context_entity_id := context_event.entity_id:
if context_entity_id := context_row.entity_id:
data["context_entity_id"] = context_entity_id
data["context_entity_id_name"] = _entity_name_from_event(
context_entity_id, context_event, entity_attr_cache
data["context_entity_id_name"] = self.entity_name_cache.get(
context_entity_id, context_row
)
data["context_event_type"] = event_type
return
event_data = context_event.data
# Call service
if event_type == EVENT_CALL_SERVICE:
event_data = context_event.data
event = self.event_cache.get(context_row)
event_data = event.data
data["context_domain"] = event_data.get(ATTR_DOMAIN)
data["context_service"] = event_data.get(ATTR_SERVICE)
data["context_event_type"] = event_type
return
if not entity_id or context_event == event:
if not entity_id:
return
if (attr_entity_id := context_event.data_entity_id) is None or (
attr_entity_id = _row_event_data_extract(context_row, ENTITY_ID_JSON_EXTRACT)
if attr_entity_id is None or (
event_type in SCRIPT_AUTOMATION_EVENTS and attr_entity_id == entity_id
):
return
data["context_entity_id"] = attr_entity_id
data["context_entity_id_name"] = _entity_name_from_event(
attr_entity_id, context_event, entity_attr_cache
data["context_entity_id_name"] = self.entity_name_cache.get(
attr_entity_id, context_row
)
data["context_event_type"] = event_type
if event_type in external_events:
domain, describe_event = external_events[event_type]
if event_type in self.external_events:
domain, describe_event = self.external_events[event_type]
data["context_domain"] = domain
if name := describe_event(context_event).get(ATTR_NAME):
event = self.event_cache.get(context_row)
if name := describe_event(event).get(ATTR_NAME):
data["context_name"] = name
def _entity_name_from_event(
entity_id: str,
event: LazyEventPartialState,
entity_attr_cache: EntityAttributeCache,
) -> str:
"""Extract the entity name from the event using the cache if possible."""
return entity_attr_cache.get(
entity_id, ATTR_FRIENDLY_NAME, event
) or split_entity_id(entity_id)[1].replace("_", " ")
def _is_sensor_continuous(
hass: HomeAssistant,
entity_id: str,
@ -834,23 +825,46 @@ def _is_sensor_continuous(
)
def _rows_match(row: Row, other_row: Row) -> bool:
"""Check of rows match by using the same method as Events __hash__."""
return bool(
row.event_type == other_row.event_type
and row.context_id == other_row.context_id
and row.time_fired == other_row.time_fired
)
def _row_event_data_extract(row: Row, extractor: re.Pattern) -> str | None:
"""Extract from event_data row."""
result = extractor.search(row.shared_data or row.event_data or "")
return result.group(1) if result else None
def _row_attributes_extract(row: Row, extractor: re.Pattern) -> str | None:
"""Extract from attributes row."""
result = extractor.search(row.shared_attrs or row.attributes or "")
return result.group(1) if result else None
def _row_time_fired_isoformat(row: Row) -> dt | None:
"""Convert the row timed_fired to isoformat."""
return process_timestamp_to_utc_isoformat(row.time_fired) or dt_util.utcnow()
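
The _row_event_data_extract and _row_attributes_extract helpers avoid calling json.loads on every row: they search the serialized column with a precompiled pattern and leave full decoding to EventCache only when an event object is actually needed. The pattern below is an assumption shown purely to illustrate the idea; the real ENTITY_ID_JSON_EXTRACT is defined elsewhere in the component:

import re

# Assumed shape of the precompiled extractor, for illustration only.
ENTITY_ID_JSON_EXTRACT = re.compile(r'"entity_id": ?"([^"]+)"')

shared_data = '{"entity_id": "light.kitchen", "service": "turn_on"}'
match = ENTITY_ID_JSON_EXTRACT.search(shared_data or "")
print(match.group(1) if match else None)  # light.kitchen
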
class LazyEventPartialState:
"""A lazy version of core Event with limited State joined in."""
__slots__ = [
"_row",
"row",
"_event_data",
"_time_fired_isoformat",
"_attributes",
"_event_data_cache",
"event_type",
"entity_id",
"state",
"_domain",
"context_id",
"context_user_id",
"context_parent_id",
"time_fired_minute",
"_event_data_cache",
"data",
]
def __init__(
@ -859,83 +873,28 @@ class LazyEventPartialState:
event_data_cache: dict[str, dict[str, Any]],
) -> None:
"""Init the lazy event."""
self._row = row
self.row = row
self._event_data: dict[str, Any] | None = None
self._time_fired_isoformat: dt | None = None
self._domain: str | None = None
self.event_type: str = self._row.event_type
self.entity_id: str | None = self._row.entity_id
self.state = self._row.state
self.context_id: str | None = self._row.context_id
self.context_user_id: str | None = self._row.context_user_id
self.context_parent_id: str | None = self._row.context_parent_id
self.time_fired_minute: int = self._row.time_fired.minute
self._event_data_cache = event_data_cache
@property
def attributes_icon(self) -> str | None:
"""Extract the icon from the decoded attributes or json."""
result = ICON_JSON_EXTRACT.search(
self._row.shared_attrs or self._row.attributes or ""
)
return result.group(1) if result else None
@property
def data_entity_id(self) -> str | None:
"""Extract the entity id from the decoded data or json."""
if self._event_data:
return self._event_data.get(ATTR_ENTITY_ID)
result = ENTITY_ID_JSON_EXTRACT.search(
self._row.shared_data or self._row.event_data or ""
)
return result.group(1) if result else None
@property
def data_domain(self) -> str | None:
"""Extract the domain from the decoded data or json."""
result = DOMAIN_JSON_EXTRACT.search(
self._row.shared_data or self._row.event_data or ""
)
return result.group(1) if result else None
@property
def attributes_friendly_name(self) -> str | None:
"""Extract the friendly name from the decoded attributes or json."""
result = FRIENDLY_NAME_JSON_EXTRACT.search(
self._row.shared_attrs or self._row.attributes or ""
)
return result.group(1) if result else None
@property
def data(self) -> dict[str, Any]:
"""Event data."""
if self._event_data is None:
source: str = self._row.shared_data or self._row.event_data
self.event_type: str = self.row.event_type
self.entity_id: str | None = self.row.entity_id
self.state = self.row.state
self.context_id: str | None = self.row.context_id
self.context_user_id: str | None = self.row.context_user_id
self.context_parent_id: str | None = self.row.context_parent_id
source: str = self.row.shared_data or self.row.event_data
if not source:
self._event_data = {}
self.data = {}
elif event_data := self._event_data_cache.get(source):
self._event_data = event_data
self.data = event_data
else:
self._event_data = self._event_data_cache[source] = cast(
self.data = self._event_data_cache[source] = cast(
dict[str, Any], json.loads(source)
)
return self._event_data
@property
def time_fired_isoformat(self) -> dt | None:
"""Time event was fired in utc isoformat."""
if not self._time_fired_isoformat:
self._time_fired_isoformat = (
process_timestamp_to_utc_isoformat(self._row.time_fired)
or dt_util.utcnow()
)
return self._time_fired_isoformat
class EntityAttributeCache:
"""A cache to lookup static entity_id attribute.
class EntityNameCache:
"""A cache to lookup the name for an entity.
This class should not be used to lookup attributes
that are expected to change state.
@ -944,27 +903,37 @@ class EntityAttributeCache:
def __init__(self, hass: HomeAssistant) -> None:
"""Init the cache."""
self._hass = hass
self._cache: dict[str, dict[str, Any]] = {}
self._names: dict[str, str] = {}
def get(self, entity_id: str, attribute: str, event: LazyEventPartialState) -> Any:
"""Lookup an attribute for an entity or get it from the cache."""
if entity_id in self._cache:
if attribute in self._cache[entity_id]:
return self._cache[entity_id][attribute]
def get(self, entity_id: str, row: Row) -> str:
"""Lookup an the friendly name."""
if entity_id in self._names:
return self._names[entity_id]
if (current_state := self._hass.states.get(entity_id)) and (
friendly_name := current_state.attributes.get(ATTR_FRIENDLY_NAME)
):
self._names[entity_id] = friendly_name
elif extracted_name := _row_attributes_extract(row, FRIENDLY_NAME_JSON_EXTRACT):
self._names[entity_id] = extracted_name
else:
cache = self._cache[entity_id] = {}
return split_entity_id(entity_id)[1].replace("_", " ")
if current_state := self._hass.states.get(entity_id):
# Try the current state as its faster than decoding the
# attributes
cache[attribute] = current_state.attributes.get(attribute)
else:
# If the entity has been removed, decode the attributes
# instead
if attribute != ATTR_FRIENDLY_NAME:
raise ValueError(
f"{attribute} is not supported by {self.__class__.__name__}"
return self._names[entity_id]
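
Reduced to a plain function (hass state access faked, FRIENDLY_NAME_JSON_EXTRACT assumed), the lookup order EntityNameCache follows is: the live state's friendly_name first, then the friendly_name embedded in the row's serialized attributes, and finally the entity's object id:

import re

FRIENDLY_NAME_JSON_EXTRACT = re.compile(r'"friendly_name": ?"([^"]+)"')  # assumed pattern


def lookup_name(entity_id, current_attrs, row_attrs_json):
    """Sketch of the fallback order; only the first two results are worth caching."""
    if current_attrs and (name := current_attrs.get("friendly_name")):
        return name
    if match := FRIENDLY_NAME_JSON_EXTRACT.search(row_attrs_json or ""):
        return match.group(1)
    return entity_id.split(".", 1)[1].replace("_", " ")


print(lookup_name("light.kitchen_light", None, '{"friendly_name": "Kitchen Light"}'))
# Kitchen Light
print(lookup_name("light.kitchen_light", None, "{}"))
# kitchen light
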
class EventCache:
"""Cache LazyEventPartialState by row."""
def __init__(self, event_data_cache: dict[str, dict[str, Any]]) -> None:
"""Init the cache."""
self._event_data_cache = event_data_cache
self.event_cache: dict[Row, LazyEventPartialState] = {}
def get(self, row: Row) -> LazyEventPartialState:
"""Get the event from the row."""
if event := self.event_cache.get(row):
return event
event = self.event_cache[row] = LazyEventPartialState(
row, self._event_data_cache
)
cache[attribute] = event.attributes_friendly_name
return cache[attribute]
return event


@ -16,7 +16,6 @@ from homeassistant.components.websocket_api.const import JSON_DUMP
from homeassistant.const import EVENT_STATE_CHANGED
from homeassistant.helpers.entityfilter import convert_include_exclude_filter
from homeassistant.helpers.json import JSONEncoder
from homeassistant.util import dt as dt_util
# mypy: allow-untyped-calls, allow-untyped-defs, no-check-untyped-defs
# mypy: no-warn-return-any
@ -224,57 +223,6 @@ async def state_changed_event_filter_helper(hass):
return timer() - start
@benchmark
async def logbook_filtering_state(hass):
"""Filter state changes."""
return await _logbook_filtering(hass, 1, 1)
@benchmark
async def logbook_filtering_attributes(hass):
"""Filter attribute changes."""
return await _logbook_filtering(hass, 1, 2)
@benchmark
async def _logbook_filtering(hass, last_changed, last_updated):
# pylint: disable=import-outside-toplevel
from homeassistant.components import logbook
entity_id = "test.entity"
old_state = {"entity_id": entity_id, "state": "off"}
new_state = {
"entity_id": entity_id,
"state": "on",
"last_updated": last_updated,
"last_changed": last_changed,
}
event = _create_state_changed_event_from_old_new(
entity_id, dt_util.utcnow(), old_state, new_state
)
entity_attr_cache = logbook.EntityAttributeCache(hass)
entities_filter = convert_include_exclude_filter(
logbook.INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA({})
)
def yield_events(event):
for _ in range(10**5):
# pylint: disable=protected-access
if logbook._keep_event(hass, event, entities_filter):
yield event
start = timer()
list(logbook.humanify(hass, yield_events(event), entity_attr_cache, {}))
return timer() - start
@benchmark
async def filtering_entity_id(hass):
"""Run a 100k state changes through entity filter."""


@ -1,9 +1,8 @@
"""Tests for alexa."""
from homeassistant.components import logbook
from homeassistant.components.alexa.const import EVENT_ALEXA_SMART_HOME
from homeassistant.setup import async_setup_component
from tests.components.logbook.test_init import MockLazyEventPartialState
from tests.components.logbook.common import MockRow, mock_humanify
async def test_humanify_alexa_event(hass):
@ -12,17 +11,15 @@ async def test_humanify_alexa_event(hass):
await async_setup_component(hass, "alexa", {})
await async_setup_component(hass, "logbook", {})
hass.states.async_set("light.kitchen", "on", {"friendly_name": "Kitchen Light"})
entity_attr_cache = logbook.EntityAttributeCache(hass)
results = list(
logbook.humanify(
results = mock_humanify(
hass,
[
MockLazyEventPartialState(
MockRow(
EVENT_ALEXA_SMART_HOME,
{"request": {"namespace": "Alexa.Discovery", "name": "Discover"}},
),
MockLazyEventPartialState(
MockRow(
EVENT_ALEXA_SMART_HOME,
{
"request": {
@ -32,7 +29,7 @@ async def test_humanify_alexa_event(hass):
}
},
),
MockLazyEventPartialState(
MockRow(
EVENT_ALEXA_SMART_HOME,
{
"request": {
@ -43,9 +40,6 @@ async def test_humanify_alexa_event(hass):
},
),
],
entity_attr_cache,
{},
)
)
event1, event2, event3 = results


@ -6,7 +6,6 @@ from unittest.mock import Mock, patch
import pytest
from homeassistant.components import logbook
import homeassistant.components.automation as automation
from homeassistant.components.automation import (
ATTR_SOURCE,
@ -53,7 +52,7 @@ from tests.common import (
async_mock_service,
mock_restore_cache,
)
from tests.components.logbook.test_init import MockLazyEventPartialState
from tests.components.logbook.common import MockRow, mock_humanify
@pytest.fixture
@ -1223,17 +1222,15 @@ async def test_logbook_humanify_automation_triggered_event(hass):
hass.config.components.add("recorder")
await async_setup_component(hass, automation.DOMAIN, {})
await async_setup_component(hass, "logbook", {})
entity_attr_cache = logbook.EntityAttributeCache(hass)
event1, event2 = list(
logbook.humanify(
event1, event2 = mock_humanify(
hass,
[
MockLazyEventPartialState(
MockRow(
EVENT_AUTOMATION_TRIGGERED,
{ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"},
),
MockLazyEventPartialState(
MockRow(
EVENT_AUTOMATION_TRIGGERED,
{
ATTR_ENTITY_ID: "automation.bye",
@ -1242,9 +1239,6 @@ async def test_logbook_humanify_automation_triggered_event(hass):
},
),
],
entity_attr_cache,
{},
)
)
assert event1["name"] == "Hello Automation"


@ -1,9 +1,9 @@
"""Test automation logbook."""
from homeassistant.components import automation, logbook
from homeassistant.components import automation
from homeassistant.core import Context
from homeassistant.setup import async_setup_component
from tests.components.logbook.test_init import MockLazyEventPartialState
from tests.components.logbook.common import MockRow, mock_humanify
async def test_humanify_automation_trigger_event(hass):
@ -11,14 +11,12 @@ async def test_humanify_automation_trigger_event(hass):
hass.config.components.add("recorder")
assert await async_setup_component(hass, "automation", {})
assert await async_setup_component(hass, "logbook", {})
entity_attr_cache = logbook.EntityAttributeCache(hass)
context = Context()
event1, event2 = list(
logbook.humanify(
event1, event2 = mock_humanify(
hass,
[
MockLazyEventPartialState(
MockRow(
automation.EVENT_AUTOMATION_TRIGGERED,
{
"name": "Bla",
@ -27,7 +25,7 @@ async def test_humanify_automation_trigger_event(hass):
},
context=context,
),
MockLazyEventPartialState(
MockRow(
automation.EVENT_AUTOMATION_TRIGGERED,
{
"name": "Bla",
@ -36,9 +34,6 @@ async def test_humanify_automation_trigger_event(hass):
context=context,
),
],
entity_attr_cache,
{},
)
)
assert event1["name"] == "Bla"


@ -2,7 +2,6 @@
from unittest.mock import patch
from homeassistant.components import logbook
from homeassistant.components.deconz.const import CONF_GESTURE, DOMAIN as DECONZ_DOMAIN
from homeassistant.components.deconz.deconz_event import (
CONF_DECONZ_ALARM_EVENT,
@ -21,7 +20,7 @@ from homeassistant.util import slugify
from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration
from tests.components.logbook.test_init import MockLazyEventPartialState
from tests.components.logbook.common import MockRow, mock_humanify
async def test_humanifying_deconz_alarm_event(hass, aioclient_mock):
@ -67,13 +66,11 @@ async def test_humanifying_deconz_alarm_event(hass, aioclient_mock):
hass.config.components.add("recorder")
assert await async_setup_component(hass, "logbook", {})
entity_attr_cache = logbook.EntityAttributeCache(hass)
events = list(
logbook.humanify(
events = mock_humanify(
hass,
[
MockLazyEventPartialState(
MockRow(
CONF_DECONZ_ALARM_EVENT,
{
CONF_CODE: 1234,
@ -84,9 +81,6 @@ async def test_humanifying_deconz_alarm_event(hass, aioclient_mock):
},
),
],
entity_attr_cache,
{},
)
)
assert events[0]["name"] == "Keypad"
@ -161,14 +155,12 @@ async def test_humanifying_deconz_event(hass, aioclient_mock):
hass.config.components.add("recorder")
assert await async_setup_component(hass, "logbook", {})
entity_attr_cache = logbook.EntityAttributeCache(hass)
events = list(
logbook.humanify(
events = mock_humanify(
hass,
[
# Event without matching device trigger
MockLazyEventPartialState(
MockRow(
CONF_DECONZ_EVENT,
{
CONF_DEVICE_ID: switch_entry.id,
@ -178,7 +170,7 @@ async def test_humanifying_deconz_event(hass, aioclient_mock):
},
),
# Event with matching device trigger
MockLazyEventPartialState(
MockRow(
CONF_DECONZ_EVENT,
{
CONF_DEVICE_ID: hue_remote_entry.id,
@ -188,7 +180,7 @@ async def test_humanifying_deconz_event(hass, aioclient_mock):
},
),
# Gesture with matching device trigger
MockLazyEventPartialState(
MockRow(
CONF_DECONZ_EVENT,
{
CONF_DEVICE_ID: xiaomi_cube_entry.id,
@ -198,7 +190,7 @@ async def test_humanifying_deconz_event(hass, aioclient_mock):
},
),
# Unsupported device trigger
MockLazyEventPartialState(
MockRow(
CONF_DECONZ_EVENT,
{
CONF_DEVICE_ID: xiaomi_cube_entry.id,
@ -208,7 +200,7 @@ async def test_humanifying_deconz_event(hass, aioclient_mock):
},
),
# Unknown event
MockLazyEventPartialState(
MockRow(
CONF_DECONZ_EVENT,
{
CONF_DEVICE_ID: faulty_entry.id,
@ -218,9 +210,6 @@ async def test_humanifying_deconz_event(hass, aioclient_mock):
},
),
],
entity_attr_cache,
{},
)
)
assert events[0]["name"] == "Switch 1"


@ -1,5 +1,4 @@
"""The tests for Google Assistant logbook."""
from homeassistant.components import logbook
from homeassistant.components.google_assistant.const import (
DOMAIN,
EVENT_COMMAND_RECEIVED,
@ -9,7 +8,7 @@ from homeassistant.components.google_assistant.const import (
from homeassistant.const import ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME
from homeassistant.setup import async_setup_component
from tests.components.logbook.test_init import MockLazyEventPartialState
from tests.components.logbook.common import MockRow, mock_humanify
async def test_humanify_command_received(hass):
@ -18,17 +17,15 @@ async def test_humanify_command_received(hass):
hass.config.components.add("frontend")
hass.config.components.add("google_assistant")
assert await async_setup_component(hass, "logbook", {})
entity_attr_cache = logbook.EntityAttributeCache(hass)
hass.states.async_set(
"light.kitchen", "on", {ATTR_FRIENDLY_NAME: "The Kitchen Lights"}
)
events = list(
logbook.humanify(
events = mock_humanify(
hass,
[
MockLazyEventPartialState(
MockRow(
EVENT_COMMAND_RECEIVED,
{
"request_id": "abcd",
@ -42,7 +39,7 @@ async def test_humanify_command_received(hass):
"source": SOURCE_LOCAL,
},
),
MockLazyEventPartialState(
MockRow(
EVENT_COMMAND_RECEIVED,
{
"request_id": "abcd",
@ -57,9 +54,6 @@ async def test_humanify_command_received(hass):
},
),
],
entity_attr_cache,
{},
)
)
assert len(events) == 2


@ -1,7 +1,6 @@
"""Test HomeKit initialization."""
from unittest.mock import patch
from homeassistant.components import logbook
from homeassistant.components.homekit.const import (
ATTR_DISPLAY_NAME,
ATTR_VALUE,
@ -11,7 +10,7 @@ from homeassistant.components.homekit.const import (
from homeassistant.const import ATTR_ENTITY_ID, ATTR_SERVICE
from homeassistant.setup import async_setup_component
from tests.components.logbook.test_init import MockLazyEventPartialState
from tests.components.logbook.common import MockRow, mock_humanify
async def test_humanify_homekit_changed_event(hass, hk_driver, mock_get_source_ip):
@ -20,13 +19,11 @@ async def test_humanify_homekit_changed_event(hass, hk_driver, mock_get_source_ip):
with patch("homeassistant.components.homekit.HomeKit"):
assert await async_setup_component(hass, "homekit", {"homekit": {}})
assert await async_setup_component(hass, "logbook", {})
entity_attr_cache = logbook.EntityAttributeCache(hass)
event1, event2 = list(
logbook.humanify(
event1, event2 = mock_humanify(
hass,
[
MockLazyEventPartialState(
MockRow(
EVENT_HOMEKIT_CHANGED,
{
ATTR_ENTITY_ID: "lock.front_door",
@ -34,7 +31,7 @@ async def test_humanify_homekit_changed_event(hass, hk_driver, mock_get_source_ip):
ATTR_SERVICE: "lock",
},
),
MockLazyEventPartialState(
MockRow(
EVENT_HOMEKIT_CHANGED,
{
ATTR_ENTITY_ID: "cover.window",
@ -44,9 +41,6 @@ async def test_humanify_homekit_changed_event(hass, hk_driver, mock_get_source_ip):
},
),
],
entity_attr_cache,
{},
)
)
assert event1["name"] == "HomeKit"


@ -0,0 +1,58 @@
"""Tests for the logbook component."""
from __future__ import annotations
import json
from typing import Any
from homeassistant.components import logbook
from homeassistant.components.recorder.models import process_timestamp_to_utc_isoformat
from homeassistant.core import Context
from homeassistant.helpers.json import JSONEncoder
import homeassistant.util.dt as dt_util
class MockRow:
"""Minimal row mock."""
def __init__(
self,
event_type: str,
data: dict[str, Any] | None = None,
context: Context | None = None,
):
"""Init the fake row."""
self.event_type = event_type
self.shared_data = json.dumps(data, cls=JSONEncoder)
self.data = data
self.time_fired = dt_util.utcnow()
self.context_parent_id = context.parent_id if context else None
self.context_user_id = context.user_id if context else None
self.context_id = context.id if context else None
self.state = None
self.entity_id = None
@property
def time_fired_minute(self):
"""Minute the event was fired."""
return self.time_fired.minute
@property
def time_fired_isoformat(self):
"""Time event was fired in utc isoformat."""
return process_timestamp_to_utc_isoformat(self.time_fired)
def mock_humanify(hass_, rows):
"""Wrap humanify with mocked logbook objects."""
event_data_cache = {}
context_lookup = {}
entity_name_cache = logbook.EntityNameCache(hass_)
event_cache = logbook.EventCache(event_data_cache)
context_augmenter = logbook.ContextAugmenter(
context_lookup, entity_name_cache, {}, event_cache
)
return list(
logbook.humanify(
hass_, rows, entity_name_cache, event_cache, context_augmenter
),
)


@ -4,6 +4,7 @@ import collections
from datetime import datetime, timedelta
from http import HTTPStatus
import json
from typing import Any
from unittest.mock import Mock, patch
import pytest
@ -41,6 +42,8 @@ from homeassistant.helpers.json import JSONEncoder
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from .common import mock_humanify
from tests.common import async_capture_events, mock_platform
from tests.components.recorder.common import (
async_recorder_block_till_done,
@ -212,16 +215,11 @@ def test_home_assistant_start_stop_grouped(hass_):
Events that are occurring in the same minute.
"""
entity_attr_cache = logbook.EntityAttributeCache(hass_)
entries = list(
logbook.humanify(
entries = mock_humanify(
hass_,
(
MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP),
MockLazyEventPartialState(EVENT_HOMEASSISTANT_START),
),
entity_attr_cache,
{},
MockRow(EVENT_HOMEASSISTANT_STOP),
MockRow(EVENT_HOMEASSISTANT_START),
),
)
@ -231,30 +229,17 @@ def test_home_assistant_start_stop_grouped(hass_):
)
def test_unsupported_attributes_in_cache_throws(hass):
"""Test unsupported attributes in cache."""
entity_attr_cache = logbook.EntityAttributeCache(hass)
event = MockLazyEventPartialState(EVENT_STATE_CHANGED)
with pytest.raises(ValueError):
entity_attr_cache.get("sensor.xyz", "not_supported", event)
def test_home_assistant_start(hass_):
"""Test if HA start is not filtered or converted into a restart."""
entity_id = "switch.bla"
pointA = dt_util.utcnow()
entity_attr_cache = logbook.EntityAttributeCache(hass_)
entries = list(
logbook.humanify(
entries = mock_humanify(
hass_,
(
MockLazyEventPartialState(EVENT_HOMEASSISTANT_START),
create_state_changed_event(pointA, entity_id, 10),
MockRow(EVENT_HOMEASSISTANT_START),
create_state_changed_event(pointA, entity_id, 10).row,
),
entity_attr_cache,
{},
)
)
assert len(entries) == 2
@ -267,13 +252,11 @@ def test_process_custom_logbook_entries(hass_):
name = "Nice name"
message = "has a custom entry"
entity_id = "sun.sun"
entity_attr_cache = logbook.EntityAttributeCache(hass_)
entries = list(
logbook.humanify(
entries = mock_humanify(
hass_,
(
MockLazyEventPartialState(
MockRow(
logbook.EVENT_LOGBOOK_ENTRY,
{
logbook.ATTR_NAME: name,
@ -282,9 +265,6 @@ def test_process_custom_logbook_entries(hass_):
},
),
),
entity_attr_cache,
{},
)
)
assert len(entries) == 1
@ -343,11 +323,13 @@ def create_state_changed_event_from_old_new(
"state_id",
"old_state_id",
"shared_attrs",
"shared_data",
],
)
row.event_type = EVENT_STATE_CHANGED
row.event_data = "{}"
row.shared_data = "{}"
row.attributes = attributes_json
row.shared_attrs = attributes_json
row.time_fired = event_time_fired
@ -1987,34 +1969,26 @@ def _assert_entry(
assert state == entry["state"]
class MockLazyEventPartialState(ha.Event):
"""Minimal mock of a Lazy event."""
class MockRow:
"""Minimal row mock."""
@property
def data_entity_id(self):
"""Lookup entity id."""
return self.data.get(ATTR_ENTITY_ID)
@property
def data_domain(self):
"""Lookup domain."""
return self.data.get(ATTR_DOMAIN)
def __init__(self, event_type: str, data: dict[str, Any] = None):
"""Init the fake row."""
self.event_type = event_type
self.shared_data = json.dumps(data, cls=JSONEncoder)
self.data = data
self.time_fired = dt_util.utcnow()
self.context_parent_id = None
self.context_user_id = None
self.context_id = None
self.state = None
self.entity_id = None
@property
def time_fired_minute(self):
"""Minute the event was fired."""
return self.time_fired.minute
@property
def context_user_id(self):
"""Context user id of event."""
return self.context.user_id
@property
def context_id(self):
"""Context id of event."""
return self.context.id
@property
def time_fired_isoformat(self):
"""Time event was fired in utc isoformat."""


@ -6,7 +6,7 @@ from unittest.mock import Mock, patch
import pytest
from homeassistant.components import logbook, script
from homeassistant.components import script
from homeassistant.components.script import DOMAIN, EVENT_SCRIPT_STARTED
from homeassistant.const import (
ATTR_ENTITY_ID,
@ -41,7 +41,7 @@ from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import async_fire_time_changed, async_mock_service, mock_restore_cache
from tests.components.logbook.test_init import MockLazyEventPartialState
from tests.components.logbook.common import MockRow, mock_humanify
ENTITY_ID = "script.test"
@ -526,24 +526,19 @@ async def test_logbook_humanify_script_started_event(hass):
hass.config.components.add("recorder")
await async_setup_component(hass, DOMAIN, {})
await async_setup_component(hass, "logbook", {})
entity_attr_cache = logbook.EntityAttributeCache(hass)
event1, event2 = list(
logbook.humanify(
event1, event2 = mock_humanify(
hass,
[
MockLazyEventPartialState(
MockRow(
EVENT_SCRIPT_STARTED,
{ATTR_ENTITY_ID: "script.hello", ATTR_NAME: "Hello Script"},
),
MockLazyEventPartialState(
MockRow(
EVENT_SCRIPT_STARTED,
{ATTR_ENTITY_ID: "script.bye", ATTR_NAME: "Bye Script"},
),
],
entity_attr_cache,
{},
)
)
assert event1["name"] == "Hello Script"


@ -1,5 +1,4 @@
"""The tests for Shelly logbook."""
from homeassistant.components import logbook
from homeassistant.components.shelly.const import (
ATTR_CHANNEL,
ATTR_CLICK_TYPE,
@ -10,7 +9,7 @@ from homeassistant.components.shelly.const import (
from homeassistant.const import ATTR_DEVICE_ID
from homeassistant.setup import async_setup_component
from tests.components.logbook.test_init import MockLazyEventPartialState
from tests.components.logbook.common import MockRow, mock_humanify
async def test_humanify_shelly_click_event_block_device(hass, coap_wrapper):
@ -18,13 +17,11 @@ async def test_humanify_shelly_click_event_block_device(hass, coap_wrapper):
assert coap_wrapper
hass.config.components.add("recorder")
assert await async_setup_component(hass, "logbook", {})
entity_attr_cache = logbook.EntityAttributeCache(hass)
event1, event2 = list(
logbook.humanify(
event1, event2 = mock_humanify(
hass,
[
MockLazyEventPartialState(
MockRow(
EVENT_SHELLY_CLICK,
{
ATTR_DEVICE_ID: coap_wrapper.device_id,
@ -33,7 +30,7 @@ async def test_humanify_shelly_click_event_block_device(hass, coap_wrapper):
ATTR_CHANNEL: 1,
},
),
MockLazyEventPartialState(
MockRow(
EVENT_SHELLY_CLICK,
{
ATTR_DEVICE_ID: "no_device_id",
@ -43,9 +40,6 @@ async def test_humanify_shelly_click_event_block_device(hass, coap_wrapper):
},
),
],
entity_attr_cache,
{},
)
)
assert event1["name"] == "Shelly"
@ -68,13 +62,11 @@ async def test_humanify_shelly_click_event_rpc_device(hass, rpc_wrapper):
assert rpc_wrapper
hass.config.components.add("recorder")
assert await async_setup_component(hass, "logbook", {})
entity_attr_cache = logbook.EntityAttributeCache(hass)
event1, event2 = list(
logbook.humanify(
event1, event2 = mock_humanify(
hass,
[
MockLazyEventPartialState(
MockRow(
EVENT_SHELLY_CLICK,
{
ATTR_DEVICE_ID: rpc_wrapper.device_id,
@ -83,7 +75,7 @@ async def test_humanify_shelly_click_event_rpc_device(hass, rpc_wrapper):
ATTR_CHANNEL: 1,
},
),
MockLazyEventPartialState(
MockRow(
EVENT_SHELLY_CLICK,
{
ATTR_DEVICE_ID: "no_device_id",
@ -93,9 +85,6 @@ async def test_humanify_shelly_click_event_rpc_device(hass, rpc_wrapper):
},
),
],
entity_attr_cache,
{},
)
)
assert event1["name"] == "Shelly"