Add support for selecting device_ids from the logbook (#72039)

Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
Author: J. Nick Koston (committed by GitHub)
Date: 2022-05-18 01:58:30 -05:00
parent 0dc12c70e3
commit c4fc84ec1e
10 changed files with 1112 additions and 470 deletions

homeassistant/components/logbook/__init__.py

@@ -50,7 +50,11 @@ from homeassistant.core import (
     split_entity_id,
 )
 from homeassistant.exceptions import InvalidEntityFormatError
-from homeassistant.helpers import config_validation as cv, entity_registry as er
+from homeassistant.helpers import (
+    config_validation as cv,
+    device_registry as dr,
+    entity_registry as er,
+)
 from homeassistant.helpers.entityfilter import (
     INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA,
     EntityFilter,
@@ -64,7 +68,8 @@ from homeassistant.helpers.typing import ConfigType
 from homeassistant.loader import bind_hass
 import homeassistant.util.dt as dt_util
 
-from .queries import PSUEDO_EVENT_STATE_CHANGED, statement_for_request
+from .queries import statement_for_request
+from .queries.common import PSUEDO_EVENT_STATE_CHANGED
 
 _LOGGER = logging.getLogger(__name__)
@ -96,8 +101,11 @@ LOGBOOK_ENTRY_STATE = "state"
LOGBOOK_ENTRY_WHEN = "when" LOGBOOK_ENTRY_WHEN = "when"
ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED = {EVENT_LOGBOOK_ENTRY, EVENT_CALL_SERVICE} ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED = {EVENT_LOGBOOK_ENTRY, EVENT_CALL_SERVICE}
ENTITY_EVENTS_WITHOUT_CONFIG_ENTRY = {
SCRIPT_AUTOMATION_EVENTS = {EVENT_AUTOMATION_TRIGGERED, EVENT_SCRIPT_STARTED} EVENT_LOGBOOK_ENTRY,
EVENT_AUTOMATION_TRIGGERED,
EVENT_SCRIPT_STARTED,
}
LOG_MESSAGE_SCHEMA = vol.Schema( LOG_MESSAGE_SCHEMA = vol.Schema(
{ {
@@ -209,12 +217,61 @@ async def _process_logbook_platform(
     platform.async_describe_events(hass, _async_describe_event)
 
 
+def _async_determine_event_types(
+    hass: HomeAssistant, entity_ids: list[str] | None, device_ids: list[str] | None
+) -> tuple[str, ...]:
+    """Reduce the event types based on the entity ids and device ids."""
+    external_events: dict[
+        str, tuple[str, Callable[[LazyEventPartialState], dict[str, Any]]]
+    ] = hass.data.get(DOMAIN, {})
+    if not entity_ids and not device_ids:
+        return (*ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED, *external_events)
+
+    config_entry_ids: set[str] = set()
+    interested_event_types: set[str] = set()
+
+    if entity_ids:
+        #
+        # Home Assistant doesn't allow firing events from
+        # entities so we have a limited list to check
+        #
+        # automations and scripts can refer to entities
+        # but they do not have a config entry so we need
+        # to add them.
+        #
+        # We also allow entity_ids to be recorded via
+        # manual logbook entries.
+        #
+        interested_event_types |= ENTITY_EVENTS_WITHOUT_CONFIG_ENTRY
+
+    if device_ids:
+        dev_reg = dr.async_get(hass)
+        for device_id in device_ids:
+            if (device := dev_reg.async_get(device_id)) and device.config_entries:
+                config_entry_ids |= device.config_entries
+        interested_domains: set[str] = set()
+        for entry_id in config_entry_ids:
+            if entry := hass.config_entries.async_get_entry(entry_id):
+                interested_domains.add(entry.domain)
+        for external_event, domain_call in external_events.items():
+            if domain_call[0] in interested_domains:
+                interested_event_types.add(external_event)
+
+    return tuple(
+        event_type
+        for event_type in (EVENT_LOGBOOK_ENTRY, *external_events)
+        if event_type in interested_event_types
+    )
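For illustration, a hedged sketch of how this reduction behaves, assuming a single registered external event, "alexa_smart_home_event" from the alexa domain (the event name and registry contents here are hypothetical, not part of the commit):

# Sketch only: assume hass.data[DOMAIN] == {"alexa_smart_home_event": ("alexa", describe)}
# No filter at all -> every known event type:
assert set(_async_determine_event_types(hass, None, None)) == {
    EVENT_LOGBOOK_ENTRY,
    EVENT_CALL_SERVICE,
    "alexa_smart_home_event",
}
# Entity filter: only event types an entity can appear in; the external
# event is dropped because no matching device/config entry domain was given.
assert _async_determine_event_types(hass, ["light.kitchen"], None) == (
    EVENT_LOGBOOK_ENTRY,
)
# Device filter: only event types registered by the domains behind the
# device's config entries ("<device_id>" is a placeholder).
assert _async_determine_event_types(hass, None, ["<device_id>"]) == (
    "alexa_smart_home_event",
)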
+
+
 def _ws_formatted_get_events(
     hass: HomeAssistant,
     msg_id: int,
     start_day: dt,
     end_day: dt,
+    event_types: tuple[str, ...],
+    ent_reg: er.EntityRegistry,
     entity_ids: list[str] | None = None,
+    device_ids: list[str] | None = None,
     filters: Filters | None = None,
     entities_filter: EntityFilter | Callable[[str], bool] | None = None,
     context_id: str | None = None,
@@ -227,7 +284,10 @@ def _ws_formatted_get_events(
         hass,
         start_day,
         end_day,
+        event_types,
+        ent_reg,
         entity_ids,
+        device_ids,
         filters,
         entities_filter,
         context_id,
@@ -244,6 +304,7 @@ def _ws_formatted_get_events(
         vol.Required("start_time"): str,
         vol.Optional("end_time"): str,
         vol.Optional("entity_ids"): [str],
+        vol.Optional("device_ids"): [str],
         vol.Optional("context_id"): str,
     }
 )
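For illustration, a websocket message that exercises the new field (id, time, and device id values are made up):

# Hypothetical logbook/get_events message matching the schema above:
{
    "id": 5,
    "type": "logbook/get_events",
    "start_time": "2022-05-18T06:00:00+00:00",
    "device_ids": ["e1816d6e8cbb4f3c9b8c2ea6d24a56a2"],
}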
@@ -274,8 +335,11 @@ async def ws_get_events(
         connection.send_result(msg["id"], [])
         return
 
+    device_ids = msg.get("device_ids")
     entity_ids = msg.get("entity_ids")
     context_id = msg.get("context_id")
+    event_types = _async_determine_event_types(hass, entity_ids, device_ids)
+    ent_reg = er.async_get(hass)
 
     connection.send_message(
         await get_instance(hass).async_add_executor_job(
@@ -284,7 +348,10 @@ async def ws_get_events(
             msg["id"],
             start_time,
             end_time,
+            event_types,
+            ent_reg,
             entity_ids,
+            device_ids,
             hass.data[LOGBOOK_FILTERS],
             hass.data[LOGBOOK_ENTITIES_FILTER],
             context_id,
@@ -354,6 +421,9 @@ class LogbookView(HomeAssistantView):
                 "Can't combine entity with context_id", HTTPStatus.BAD_REQUEST
             )
 
+        event_types = _async_determine_event_types(hass, entity_ids, None)
+        ent_reg = er.async_get(hass)
+
         def json_events() -> web.Response:
             """Fetch events and generate JSON."""
             return self.json(
@@ -361,7 +431,10 @@ class LogbookView(HomeAssistantView):
                     hass,
                     start_day,
                     end_day,
+                    event_types,
+                    ent_reg,
                     entity_ids,
+                    None,
                     self.filters,
                     self.entities_filter,
                     context_id,
@@ -487,7 +560,10 @@ def _get_events(
     hass: HomeAssistant,
     start_day: dt,
     end_day: dt,
+    event_types: tuple[str, ...],
+    ent_reg: er.EntityRegistry,
     entity_ids: list[str] | None = None,
+    device_ids: list[str] | None = None,
     filters: Filters | None = None,
     entities_filter: EntityFilter | Callable[[str], bool] | None = None,
     context_id: str | None = None,
@@ -496,17 +572,13 @@ def _get_events(
 ) -> list[dict[str, Any]]:
     """Get events for a period of time."""
     assert not (
-        entity_ids and context_id
-    ), "can't pass in both entity_ids and context_id"
+        context_id and (entity_ids or device_ids)
+    ), "can't pass in both context_id and (entity_ids or device_ids)"
     external_events: dict[
         str, tuple[str, Callable[[LazyEventPartialState], dict[str, Any]]]
     ] = hass.data.get(DOMAIN, {})
-    event_types = (*ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED, *external_events)
     format_time = _row_time_fired_timestamp if timestamp else _row_time_fired_isoformat
     entity_name_cache = EntityNameCache(hass)
-    ent_reg = er.async_get(hass)
-
     if entity_ids is not None:
         entities_filter = generate_filter([], entity_ids, [], [])
@@ -529,7 +601,13 @@ def _get_events(
         return query.yield_per(1024)  # type: ignore[no-any-return]
 
     stmt = statement_for_request(
-        start_day, end_day, event_types, entity_ids, filters, context_id
+        start_day,
+        end_day,
+        event_types,
+        entity_ids,
+        device_ids,
+        filters,
+        context_id,
     )
     if _LOGGER.isEnabledFor(logging.DEBUG):
         _LOGGER.debug(
@@ -668,12 +746,6 @@ def _row_event_data_extract(row: Row, extractor: re.Pattern) -> str | None:
     return result.group(1) if result else None
 
 
-def _row_attributes_extract(row: Row, extractor: re.Pattern) -> str | None:
-    """Extract from attributes row."""
-    result = extractor.search(row.shared_attrs or row.attributes or "")
-    return result.group(1) if result else None
-
-
 def _row_time_fired_isoformat(row: Row) -> str:
     """Convert the row timed_fired to isoformat."""
     return process_timestamp_to_utc_isoformat(row.time_fired or dt_util.utcnow())

homeassistant/components/logbook/queries.py (deleted)

@@ -1,451 +0,0 @@
"""Queries for logbook."""
from __future__ import annotations
from collections.abc import Iterable
from datetime import datetime as dt
import sqlalchemy
from sqlalchemy import JSON, lambda_stmt, select, type_coerce, union_all
from sqlalchemy.orm import Query, aliased
from sqlalchemy.sql.elements import ClauseList
from sqlalchemy.sql.expression import literal
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import Select
from homeassistant.components.proximity import DOMAIN as PROXIMITY_DOMAIN
from homeassistant.components.recorder.filters import Filters
from homeassistant.components.recorder.models import (
ENTITY_ID_LAST_UPDATED_INDEX,
JSON_VARIENT_CAST,
LAST_UPDATED_INDEX,
EventData,
Events,
StateAttributes,
States,
)
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
ENTITY_ID_JSON_TEMPLATE = '%"entity_id":"{}"%'
CONTINUOUS_DOMAINS = {PROXIMITY_DOMAIN, SENSOR_DOMAIN}
CONTINUOUS_ENTITY_ID_LIKE = [f"{domain}.%" for domain in CONTINUOUS_DOMAINS]
UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":'
UNIT_OF_MEASUREMENT_JSON_LIKE = f"%{UNIT_OF_MEASUREMENT_JSON}%"
OLD_STATE = aliased(States, name="old_state")
SHARED_ATTRS_JSON = type_coerce(
StateAttributes.shared_attrs.cast(JSON_VARIENT_CAST), JSON(none_as_null=True)
)
OLD_FORMAT_ATTRS_JSON = type_coerce(
States.attributes.cast(JSON_VARIENT_CAST), JSON(none_as_null=True)
)
PSUEDO_EVENT_STATE_CHANGED = None
# Since we don't store event_types and None
# and we don't store state_changed in events
# we use a NULL for state_changed events
# when we synthesize them from the states table
# since it avoids another column being sent
# in the payload
EVENT_COLUMNS = (
Events.event_id.label("event_id"),
Events.event_type.label("event_type"),
Events.event_data.label("event_data"),
Events.time_fired.label("time_fired"),
Events.context_id.label("context_id"),
Events.context_user_id.label("context_user_id"),
Events.context_parent_id.label("context_parent_id"),
)
STATE_COLUMNS = (
States.state_id.label("state_id"),
States.state.label("state"),
States.entity_id.label("entity_id"),
SHARED_ATTRS_JSON["icon"].as_string().label("icon"),
OLD_FORMAT_ATTRS_JSON["icon"].as_string().label("old_format_icon"),
)
EMPTY_STATE_COLUMNS = (
literal(value=None, type_=sqlalchemy.String).label("state_id"),
literal(value=None, type_=sqlalchemy.String).label("state"),
literal(value=None, type_=sqlalchemy.String).label("entity_id"),
literal(value=None, type_=sqlalchemy.String).label("icon"),
literal(value=None, type_=sqlalchemy.String).label("old_format_icon"),
)
EVENT_ROWS_NO_STATES = (
*EVENT_COLUMNS,
EventData.shared_data.label("shared_data"),
*EMPTY_STATE_COLUMNS,
)
# Virtual column to tell logbook if it should avoid processing
# the event as its only used to link contexts
CONTEXT_ONLY = literal("1").label("context_only")
NOT_CONTEXT_ONLY = literal(None).label("context_only")
def statement_for_request(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str] | None = None,
filters: Filters | None = None,
context_id: str | None = None,
) -> StatementLambdaElement:
"""Generate the logbook statement for a logbook request."""
# No entities: logbook sends everything for the timeframe
# limited by the context_id and the yaml configured filter
if not entity_ids:
entity_filter = filters.entity_filter() if filters else None
return _all_stmt(start_day, end_day, event_types, entity_filter, context_id)
# Multiple entities: logbook sends everything for the timeframe for the entities
#
# This is the least efficient query because we use
# like matching which means part of the query has to be built each
# time when the entity_ids are not in the cache
if len(entity_ids) > 1:
return _entities_stmt(start_day, end_day, event_types, entity_ids)
# Single entity: logbook sends everything for the timeframe for the entity
entity_id = entity_ids[0]
entity_like = ENTITY_ID_JSON_TEMPLATE.format(entity_id)
return _single_entity_stmt(start_day, end_day, event_types, entity_id, entity_like)
def _select_events_context_id_subquery(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
) -> Select:
"""Generate the select for a context_id subquery."""
return (
select(Events.context_id)
.where((Events.time_fired > start_day) & (Events.time_fired < end_day))
.where(Events.event_type.in_(event_types))
.outerjoin(EventData, (Events.data_id == EventData.data_id))
)
def _select_entities_context_ids_sub_query(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str],
) -> Select:
"""Generate a subquery to find context ids for multiple entities."""
return select(
union_all(
_select_events_context_id_subquery(start_day, end_day, event_types).where(
_apply_event_entity_id_matchers(entity_ids)
),
_apply_entities_hints(select(States.context_id))
.filter((States.last_updated > start_day) & (States.last_updated < end_day))
.where(States.entity_id.in_(entity_ids)),
).c.context_id
)
def _select_events_context_only() -> Select:
"""Generate an events query that mark them as for context_only.
By marking them as context_only we know they are only for
linking context ids and we can avoid processing them.
"""
return select(*EVENT_ROWS_NO_STATES, CONTEXT_ONLY).outerjoin(
EventData, (Events.data_id == EventData.data_id)
)
def _entities_stmt(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str],
) -> StatementLambdaElement:
"""Generate a logbook query for multiple entities."""
stmt = lambda_stmt(
lambda: _select_events_without_states(start_day, end_day, event_types)
)
stmt = stmt.add_criteria(
lambda s: s.where(_apply_event_entity_id_matchers(entity_ids)).union_all(
_states_query_for_entity_ids(start_day, end_day, entity_ids),
_select_events_context_only().where(
Events.context_id.in_(
_select_entities_context_ids_sub_query(
start_day,
end_day,
event_types,
entity_ids,
)
)
),
),
# Since _apply_event_entity_id_matchers generates multiple
# like statements we need to use the entity_ids in the
# the cache key since the sql can change based on the
# likes.
track_on=(str(entity_ids),),
)
stmt += lambda s: s.order_by(Events.time_fired)
return stmt
def _select_entity_context_ids_sub_query(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
entity_id: str,
entity_id_like: str,
) -> Select:
"""Generate a subquery to find context ids for a single entity."""
return select(
union_all(
_select_events_context_id_subquery(start_day, end_day, event_types).where(
Events.event_data.like(entity_id_like)
| EventData.shared_data.like(entity_id_like)
),
_apply_entities_hints(select(States.context_id))
.filter((States.last_updated > start_day) & (States.last_updated < end_day))
.where(States.entity_id == entity_id),
).c.context_id
)
def _single_entity_stmt(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
entity_id: str,
entity_id_like: str,
) -> StatementLambdaElement:
"""Generate a logbook query for a single entity."""
stmt = lambda_stmt(
lambda: _select_events_without_states(start_day, end_day, event_types)
.where(
Events.event_data.like(entity_id_like)
| EventData.shared_data.like(entity_id_like)
)
.union_all(
_states_query_for_entity_id(start_day, end_day, entity_id),
_select_events_context_only().where(
Events.context_id.in_(
_select_entity_context_ids_sub_query(
start_day, end_day, event_types, entity_id, entity_id_like
)
)
),
)
.order_by(Events.time_fired)
)
return stmt
def _all_stmt(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
entity_filter: ClauseList | None = None,
context_id: str | None = None,
) -> StatementLambdaElement:
"""Generate a logbook query for all entities."""
stmt = lambda_stmt(
lambda: _select_events_without_states(start_day, end_day, event_types)
)
if context_id is not None:
# Once all the old `state_changed` events
# are gone from the database remove the
# _legacy_select_events_context_id()
stmt += lambda s: s.where(Events.context_id == context_id).union_all(
_states_query_for_context_id(start_day, end_day, context_id),
_legacy_select_events_context_id(start_day, end_day, context_id),
)
elif entity_filter is not None:
stmt += lambda s: s.union_all(
_states_query_for_all(start_day, end_day).where(entity_filter)
)
else:
stmt += lambda s: s.union_all(_states_query_for_all(start_day, end_day))
stmt += lambda s: s.order_by(Events.time_fired)
return stmt
def _legacy_select_events_context_id(
start_day: dt, end_day: dt, context_id: str
) -> Select:
"""Generate a legacy events context id select that also joins states."""
# This can be removed once we no longer have event_ids in the states table
return (
select(
*EVENT_COLUMNS,
literal(value=None, type_=sqlalchemy.String).label("shared_data"),
*STATE_COLUMNS,
NOT_CONTEXT_ONLY,
)
.outerjoin(States, (Events.event_id == States.event_id))
.where(
(States.last_updated == States.last_changed) | States.last_changed.is_(None)
)
.where(_not_continuous_entity_matcher())
.outerjoin(
StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
)
.where((Events.time_fired > start_day) & (Events.time_fired < end_day))
.where(Events.context_id == context_id)
)
def _select_events_without_states(
start_day: dt, end_day: dt, event_types: tuple[str, ...]
) -> Select:
"""Generate an events select that does not join states."""
return (
select(*EVENT_ROWS_NO_STATES, NOT_CONTEXT_ONLY)
.where((Events.time_fired > start_day) & (Events.time_fired < end_day))
.where(Events.event_type.in_(event_types))
.outerjoin(EventData, (Events.data_id == EventData.data_id))
)
def _states_query_for_context_id(start_day: dt, end_day: dt, context_id: str) -> Query:
return _apply_states_filters(_select_states(), start_day, end_day).where(
States.context_id == context_id
)
def _states_query_for_entity_id(start_day: dt, end_day: dt, entity_id: str) -> Query:
return _apply_states_filters(
_apply_entities_hints(_select_states()), start_day, end_day
).where(States.entity_id == entity_id)
def _states_query_for_entity_ids(
start_day: dt, end_day: dt, entity_ids: list[str]
) -> Query:
return _apply_states_filters(
_apply_entities_hints(_select_states()), start_day, end_day
).where(States.entity_id.in_(entity_ids))
def _states_query_for_all(start_day: dt, end_day: dt) -> Query:
return _apply_states_filters(_apply_all_hints(_select_states()), start_day, end_day)
def _select_states() -> Select:
"""Generate a states select that formats the states table as event rows."""
return select(
literal(value=None, type_=sqlalchemy.Text).label("event_id"),
# We use PSUEDO_EVENT_STATE_CHANGED aka None for
# state_changed events since it takes up less
# space in the response and every row has to be
# marked with the event_type
literal(value=PSUEDO_EVENT_STATE_CHANGED, type_=sqlalchemy.String).label(
"event_type"
),
literal(value=None, type_=sqlalchemy.Text).label("event_data"),
States.last_updated.label("time_fired"),
States.context_id.label("context_id"),
States.context_user_id.label("context_user_id"),
States.context_parent_id.label("context_parent_id"),
literal(value=None, type_=sqlalchemy.Text).label("shared_data"),
*STATE_COLUMNS,
NOT_CONTEXT_ONLY,
)
def _apply_all_hints(query: Query) -> Query:
"""Force mysql to use the right index on large selects."""
return query.with_hint(
States, f"FORCE INDEX ({LAST_UPDATED_INDEX})", dialect_name="mysql"
)
def _apply_entities_hints(query: Query) -> Query:
"""Force mysql to use the right index on large selects."""
return query.with_hint(
States, f"FORCE INDEX ({ENTITY_ID_LAST_UPDATED_INDEX})", dialect_name="mysql"
)
def _apply_states_filters(query: Query, start_day: dt, end_day: dt) -> Query:
return (
query.filter(
(States.last_updated > start_day) & (States.last_updated < end_day)
)
.outerjoin(OLD_STATE, (States.old_state_id == OLD_STATE.state_id))
.where(_missing_state_matcher())
.where(_not_continuous_entity_matcher())
.where(
(States.last_updated == States.last_changed) | States.last_changed.is_(None)
)
.outerjoin(
StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
)
)
def _missing_state_matcher() -> sqlalchemy.and_:
# The below removes state change events that do not have
# and old_state or the old_state is missing (newly added entities)
# or the new_state is missing (removed entities)
return sqlalchemy.and_(
OLD_STATE.state_id.isnot(None),
(States.state != OLD_STATE.state),
States.state.isnot(None),
)
def _not_continuous_entity_matcher() -> sqlalchemy.or_:
"""Match non continuous entities."""
return sqlalchemy.or_(
_not_continuous_domain_matcher(),
sqlalchemy.and_(
_continuous_domain_matcher, _not_uom_attributes_matcher()
).self_group(),
)
def _not_continuous_domain_matcher() -> sqlalchemy.and_:
"""Match not continuous domains."""
return sqlalchemy.and_(
*[
~States.entity_id.like(entity_domain)
for entity_domain in CONTINUOUS_ENTITY_ID_LIKE
],
).self_group()
def _continuous_domain_matcher() -> sqlalchemy.or_:
"""Match continuous domains."""
return sqlalchemy.or_(
*[
States.entity_id.like(entity_domain)
for entity_domain in CONTINUOUS_ENTITY_ID_LIKE
],
).self_group()
def _not_uom_attributes_matcher() -> ClauseList:
"""Prefilter ATTR_UNIT_OF_MEASUREMENT as its much faster in sql."""
return ~StateAttributes.shared_attrs.like(
UNIT_OF_MEASUREMENT_JSON_LIKE
) | ~States.attributes.like(UNIT_OF_MEASUREMENT_JSON_LIKE)
def _apply_event_entity_id_matchers(entity_ids: Iterable[str]) -> sqlalchemy.or_:
"""Create matchers for the entity_id in the event_data."""
ors = []
for entity_id in entity_ids:
like = ENTITY_ID_JSON_TEMPLATE.format(entity_id)
ors.append(Events.event_data.like(like))
ors.append(EventData.shared_data.like(like))
return sqlalchemy.or_(*ors)

homeassistant/components/logbook/queries/__init__.py (new)

@@ -0,0 +1,70 @@
"""Queries for logbook."""
from __future__ import annotations
from datetime import datetime as dt
from sqlalchemy.sql.lambdas import StatementLambdaElement
from homeassistant.components.recorder.filters import Filters
from .all import all_stmt
from .devices import devices_stmt
from .entities import entities_stmt
from .entities_and_devices import entities_devices_stmt
def statement_for_request(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str] | None = None,
device_ids: list[str] | None = None,
filters: Filters | None = None,
context_id: str | None = None,
) -> StatementLambdaElement:
"""Generate the logbook statement for a logbook request."""
# No entities: logbook sends everything for the timeframe
# limited by the context_id and the yaml configured filter
if not entity_ids and not device_ids:
entity_filter = filters.entity_filter() if filters else None
return all_stmt(start_day, end_day, event_types, entity_filter, context_id)
# SQLAlchemy caches object quoting, so the json-quotable ids must be
# distinct objects from the non-json ones to prevent SQLAlchemy from
# quoting them incorrectly
# entities and devices: logbook sends everything for the timeframe
# for the entities and devices
if entity_ids and device_ids:
json_quotable_entity_ids = list(entity_ids)
json_quotable_device_ids = list(device_ids)
return entities_devices_stmt(
start_day,
end_day,
event_types,
entity_ids,
json_quotable_entity_ids,
json_quotable_device_ids,
)
# entities: logbook sends everything for the timeframe for the entities
if entity_ids:
json_quotable_entity_ids = list(entity_ids)
return entities_stmt(
start_day,
end_day,
event_types,
entity_ids,
json_quotable_entity_ids,
)
# devices: logbook sends everything for the timeframe for the devices
assert device_ids is not None
json_quotable_device_ids = list(device_ids)
return devices_stmt(
start_day,
end_day,
event_types,
json_quotable_device_ids,
)
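A side note on the list() copies above, illustrating the quoting concern the comment describes (sketch only, not part of the commit):

# The JSON-quoted ids must be a distinct list object from the ids used as
# plain SQL string parameters; a shallow copy is enough:
entity_ids = ["light.kitchen"]
json_quotable_entity_ids = list(entity_ids)
assert json_quotable_entity_ids == entity_ids  # same values
assert json_quotable_entity_ids is not entity_ids  # distinct object for the JSON side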

homeassistant/components/logbook/queries/all.py (new)

@@ -0,0 +1,64 @@
"""All queries for logbook."""
from __future__ import annotations
from datetime import datetime as dt
from sqlalchemy import lambda_stmt
from sqlalchemy.orm import Query
from sqlalchemy.sql.elements import ClauseList
from sqlalchemy.sql.lambdas import StatementLambdaElement
from homeassistant.components.recorder.models import LAST_UPDATED_INDEX, Events, States
from .common import (
apply_states_filters,
legacy_select_events_context_id,
select_events_without_states,
select_states,
)
def all_stmt(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
entity_filter: ClauseList | None = None,
context_id: str | None = None,
) -> StatementLambdaElement:
"""Generate a logbook query for all entities."""
stmt = lambda_stmt(
lambda: select_events_without_states(start_day, end_day, event_types)
)
if context_id is not None:
# Once all the old `state_changed` events
# are gone from the database remove the
# _legacy_select_events_context_id()
stmt += lambda s: s.where(Events.context_id == context_id).union_all(
_states_query_for_context_id(start_day, end_day, context_id),
legacy_select_events_context_id(start_day, end_day, context_id),
)
elif entity_filter is not None:
stmt += lambda s: s.union_all(
_states_query_for_all(start_day, end_day).where(entity_filter)
)
else:
stmt += lambda s: s.union_all(_states_query_for_all(start_day, end_day))
stmt += lambda s: s.order_by(Events.time_fired)
return stmt
def _states_query_for_all(start_day: dt, end_day: dt) -> Query:
return apply_states_filters(_apply_all_hints(select_states()), start_day, end_day)
def _apply_all_hints(query: Query) -> Query:
"""Force mysql to use the right index on large selects."""
return query.with_hint(
States, f"FORCE INDEX ({LAST_UPDATED_INDEX})", dialect_name="mysql"
)
def _states_query_for_context_id(start_day: dt, end_day: dt, context_id: str) -> Query:
return apply_states_filters(select_states(), start_day, end_day).where(
States.context_id == context_id
)

homeassistant/components/logbook/queries/common.py (new)

@@ -0,0 +1,286 @@
"""Queries for logbook."""
from __future__ import annotations
from collections.abc import Callable
from datetime import datetime as dt
import json
from typing import Any
import sqlalchemy
from sqlalchemy import JSON, select, type_coerce
from sqlalchemy.orm import Query, aliased
from sqlalchemy.sql.elements import ClauseList
from sqlalchemy.sql.expression import literal
from sqlalchemy.sql.selectable import Select
from homeassistant.components.proximity import DOMAIN as PROXIMITY_DOMAIN
from homeassistant.components.recorder.models import (
JSON_VARIENT_CAST,
JSONB_VARIENT_CAST,
EventData,
Events,
StateAttributes,
States,
)
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
CONTINUOUS_DOMAINS = {PROXIMITY_DOMAIN, SENSOR_DOMAIN}
CONTINUOUS_ENTITY_ID_LIKE = [f"{domain}.%" for domain in CONTINUOUS_DOMAINS]
UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":'
UNIT_OF_MEASUREMENT_JSON_LIKE = f"%{UNIT_OF_MEASUREMENT_JSON}%"
OLD_STATE = aliased(States, name="old_state")
class JSONLiteral(JSON): # type: ignore[misc]
"""Teach SA how to literalize json."""
def literal_processor(self, dialect: str) -> Callable[[Any], str]:
"""Processor to convert a value to JSON."""
def process(value: Any) -> str:
"""Dump json."""
return json.dumps(value)
return process
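A quick illustration of the hook (not in the commit): lambda statements are compiled with literal binds, and this processor is what lets SQLAlchemy inline the JSON-path comparison values:

# process() ignores the dialect argument, so any value works for the sketch:
process = JSONLiteral().literal_processor(dialect="sqlite")
assert process("light.kitchen") == '"light.kitchen"'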
EVENT_DATA_JSON = type_coerce(
EventData.shared_data.cast(JSONB_VARIENT_CAST), JSONLiteral(none_as_null=True)
)
OLD_FORMAT_EVENT_DATA_JSON = type_coerce(
Events.event_data.cast(JSONB_VARIENT_CAST), JSONLiteral(none_as_null=True)
)
SHARED_ATTRS_JSON = type_coerce(
StateAttributes.shared_attrs.cast(JSON_VARIENT_CAST), JSON(none_as_null=True)
)
OLD_FORMAT_ATTRS_JSON = type_coerce(
States.attributes.cast(JSON_VARIENT_CAST), JSON(none_as_null=True)
)
PSUEDO_EVENT_STATE_CHANGED = None
# Since we don't store event_types and None
# and we don't store state_changed in events
# we use a NULL for state_changed events
# when we synthesize them from the states table
# since it avoids another column being sent
# in the payload
EVENT_COLUMNS = (
Events.event_id.label("event_id"),
Events.event_type.label("event_type"),
Events.event_data.label("event_data"),
Events.time_fired.label("time_fired"),
Events.context_id.label("context_id"),
Events.context_user_id.label("context_user_id"),
Events.context_parent_id.label("context_parent_id"),
)
STATE_COLUMNS = (
States.state_id.label("state_id"),
States.state.label("state"),
States.entity_id.label("entity_id"),
SHARED_ATTRS_JSON["icon"].as_string().label("icon"),
OLD_FORMAT_ATTRS_JSON["icon"].as_string().label("old_format_icon"),
)
STATE_CONTEXT_ONLY_COLUMNS = (
States.state_id.label("state_id"),
States.state.label("state"),
States.entity_id.label("entity_id"),
literal(value=None, type_=sqlalchemy.String).label("icon"),
literal(value=None, type_=sqlalchemy.String).label("old_format_icon"),
)
EVENT_COLUMNS_FOR_STATE_SELECT = [
literal(value=None, type_=sqlalchemy.Text).label("event_id"),
# We use PSUEDO_EVENT_STATE_CHANGED aka None for
# state_changed events since it takes up less
# space in the response and every row has to be
# marked with the event_type
literal(value=PSUEDO_EVENT_STATE_CHANGED, type_=sqlalchemy.String).label(
"event_type"
),
literal(value=None, type_=sqlalchemy.Text).label("event_data"),
States.last_updated.label("time_fired"),
States.context_id.label("context_id"),
States.context_user_id.label("context_user_id"),
States.context_parent_id.label("context_parent_id"),
literal(value=None, type_=sqlalchemy.Text).label("shared_data"),
]
EMPTY_STATE_COLUMNS = (
literal(value=None, type_=sqlalchemy.String).label("state_id"),
literal(value=None, type_=sqlalchemy.String).label("state"),
literal(value=None, type_=sqlalchemy.String).label("entity_id"),
literal(value=None, type_=sqlalchemy.String).label("icon"),
literal(value=None, type_=sqlalchemy.String).label("old_format_icon"),
)
EVENT_ROWS_NO_STATES = (
*EVENT_COLUMNS,
EventData.shared_data.label("shared_data"),
*EMPTY_STATE_COLUMNS,
)
# Virtual column to tell logbook if it should avoid processing
# the event as its only used to link contexts
CONTEXT_ONLY = literal("1").label("context_only")
NOT_CONTEXT_ONLY = literal(None).label("context_only")
def select_events_context_id_subquery(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
) -> Select:
"""Generate the select for a context_id subquery."""
return (
select(Events.context_id)
.where((Events.time_fired > start_day) & (Events.time_fired < end_day))
.where(Events.event_type.in_(event_types))
.outerjoin(EventData, (Events.data_id == EventData.data_id))
)
def select_events_context_only() -> Select:
"""Generate an events query that mark them as for context_only.
By marking them as context_only we know they are only for
linking context ids and we can avoid processing them.
"""
return select(*EVENT_ROWS_NO_STATES, CONTEXT_ONLY).outerjoin(
EventData, (Events.data_id == EventData.data_id)
)
def select_states_context_only() -> Select:
"""Generate an states query that mark them as for context_only.
By marking them as context_only we know they are only for
linking context ids and we can avoid processing them.
"""
return select(
*EVENT_COLUMNS_FOR_STATE_SELECT, *STATE_CONTEXT_ONLY_COLUMNS, CONTEXT_ONLY
)
def select_events_without_states(
start_day: dt, end_day: dt, event_types: tuple[str, ...]
) -> Select:
"""Generate an events select that does not join states."""
return (
select(*EVENT_ROWS_NO_STATES, NOT_CONTEXT_ONLY)
.where((Events.time_fired > start_day) & (Events.time_fired < end_day))
.where(Events.event_type.in_(event_types))
.outerjoin(EventData, (Events.data_id == EventData.data_id))
)
def select_states() -> Select:
"""Generate a states select that formats the states table as event rows."""
return select(
*EVENT_COLUMNS_FOR_STATE_SELECT,
*STATE_COLUMNS,
NOT_CONTEXT_ONLY,
)
def legacy_select_events_context_id(
start_day: dt, end_day: dt, context_id: str
) -> Select:
"""Generate a legacy events context id select that also joins states."""
# This can be removed once we no longer have event_ids in the states table
return (
select(
*EVENT_COLUMNS,
literal(value=None, type_=sqlalchemy.String).label("shared_data"),
*STATE_COLUMNS,
NOT_CONTEXT_ONLY,
)
.outerjoin(States, (Events.event_id == States.event_id))
.where(
(States.last_updated == States.last_changed) | States.last_changed.is_(None)
)
.where(_not_continuous_entity_matcher())
.outerjoin(
StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
)
.where((Events.time_fired > start_day) & (Events.time_fired < end_day))
.where(Events.context_id == context_id)
)
def apply_states_filters(query: Query, start_day: dt, end_day: dt) -> Query:
"""Filter states by time range.
Filters states that do not have an old state or a new state (added / removed).
Filters states that are in a continuous domain with a UOM.
Filters states that do not have matching last_updated and last_changed.
"""
return (
query.filter(
(States.last_updated > start_day) & (States.last_updated < end_day)
)
.outerjoin(OLD_STATE, (States.old_state_id == OLD_STATE.state_id))
.where(_missing_state_matcher())
.where(_not_continuous_entity_matcher())
.where(
(States.last_updated == States.last_changed) | States.last_changed.is_(None)
)
.outerjoin(
StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
)
)
def _missing_state_matcher() -> sqlalchemy.and_:
# The below removes state change events that do not have
# an old_state or the old_state is missing (newly added entities)
# or the new_state is missing (removed entities)
return sqlalchemy.and_(
OLD_STATE.state_id.isnot(None),
(States.state != OLD_STATE.state),
States.state.isnot(None),
)
def _not_continuous_entity_matcher() -> sqlalchemy.or_:
"""Match non continuous entities."""
return sqlalchemy.or_(
_not_continuous_domain_matcher(),
sqlalchemy.and_(
_continuous_domain_matcher(), _not_uom_attributes_matcher()
).self_group(),
)
def _not_continuous_domain_matcher() -> sqlalchemy.and_:
"""Match not continuous domains."""
return sqlalchemy.and_(
*[
~States.entity_id.like(entity_domain)
for entity_domain in CONTINUOUS_ENTITY_ID_LIKE
],
).self_group()
def _continuous_domain_matcher() -> sqlalchemy.or_:
"""Match continuous domains."""
return sqlalchemy.or_(
*[
States.entity_id.like(entity_domain)
for entity_domain in CONTINUOUS_ENTITY_ID_LIKE
],
).self_group()
def _not_uom_attributes_matcher() -> ClauseList:
"""Prefilter ATTR_UNIT_OF_MEASUREMENT as its much faster in sql."""
return ~StateAttributes.shared_attrs.like(
UNIT_OF_MEASUREMENT_JSON_LIKE
) | ~States.attributes.like(UNIT_OF_MEASUREMENT_JSON_LIKE)

homeassistant/components/logbook/queries/devices.py (new)

@@ -0,0 +1,87 @@
"""Devices queries for logbook."""
from __future__ import annotations
from collections.abc import Iterable
from datetime import datetime as dt
from sqlalchemy import Column, lambda_stmt, select, union_all
from sqlalchemy.orm import Query
from sqlalchemy.sql.elements import ClauseList
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import Select
from homeassistant.components.recorder.models import Events, States
from .common import (
EVENT_DATA_JSON,
select_events_context_id_subquery,
select_events_context_only,
select_events_without_states,
select_states_context_only,
)
DEVICE_ID_IN_EVENT: Column = EVENT_DATA_JSON["device_id"]
def _select_device_id_context_ids_sub_query(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
json_quotable_device_ids: list[str],
) -> Select:
"""Generate a subquery to find context ids for multiple devices."""
return select(
union_all(
select_events_context_id_subquery(start_day, end_day, event_types).where(
apply_event_device_id_matchers(json_quotable_device_ids)
),
).c.context_id
)
def _apply_devices_context_union(
query: Query,
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
json_quotable_device_ids: list[str],
) -> StatementLambdaElement:
"""Generate a CTE to find the device context ids and a query to find linked row."""
devices_cte = _select_device_id_context_ids_sub_query(
start_day,
end_day,
event_types,
json_quotable_device_ids,
).cte()
return query.union_all(
select_events_context_only().where(Events.context_id.in_(devices_cte)),
select_states_context_only().where(States.context_id.in_(devices_cte)),
)
def devices_stmt(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
json_quotable_device_ids: list[str],
) -> StatementLambdaElement:
"""Generate a logbook query for multiple devices."""
stmt = lambda_stmt(
lambda: _apply_devices_context_union(
select_events_without_states(start_day, end_day, event_types).where(
apply_event_device_id_matchers(json_quotable_device_ids)
),
start_day,
end_day,
event_types,
json_quotable_device_ids,
).order_by(Events.time_fired)
)
return stmt
def apply_event_device_id_matchers(
json_quotable_device_ids: Iterable[str],
) -> ClauseList:
"""Create matchers for the device_ids in the event_data."""
return DEVICE_ID_IN_EVENT.in_(json_quotable_device_ids)
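A hedged usage sketch (the times, event type, and device id are illustrative; the logbook builds these arguments in _get_events and runs the statement through the recorder session):

from datetime import datetime, timedelta, timezone

start_day = datetime(2022, 5, 18, tzinfo=timezone.utc)
end_day = start_day + timedelta(days=1)
stmt = devices_stmt(start_day, end_day, ("mock_event",), ["e1816d6e8cbb4f3c"])
# then e.g. session.execute(stmt).yield_per(1024) inside the recorder executor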

homeassistant/components/logbook/queries/entities.py (new)

@@ -0,0 +1,124 @@
"""Entities queries for logbook."""
from __future__ import annotations
from collections.abc import Iterable
from datetime import datetime as dt
import sqlalchemy
from sqlalchemy import Column, lambda_stmt, select, union_all
from sqlalchemy.orm import Query
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import Select
from homeassistant.components.recorder.models import (
ENTITY_ID_LAST_UPDATED_INDEX,
Events,
States,
)
from .common import (
EVENT_DATA_JSON,
OLD_FORMAT_EVENT_DATA_JSON,
apply_states_filters,
select_events_context_id_subquery,
select_events_context_only,
select_events_without_states,
select_states,
select_states_context_only,
)
ENTITY_ID_IN_EVENT: Column = EVENT_DATA_JSON["entity_id"]
OLD_ENTITY_ID_IN_EVENT: Column = OLD_FORMAT_EVENT_DATA_JSON["entity_id"]
def _select_entities_context_ids_sub_query(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str],
json_quotable_entity_ids: list[str],
) -> Select:
"""Generate a subquery to find context ids for multiple entities."""
return select(
union_all(
select_events_context_id_subquery(start_day, end_day, event_types).where(
apply_event_entity_id_matchers(json_quotable_entity_ids)
),
apply_entities_hints(select(States.context_id))
.filter((States.last_updated > start_day) & (States.last_updated < end_day))
.where(States.entity_id.in_(entity_ids)),
).c.context_id
)
def _apply_entities_context_union(
query: Query,
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str],
json_quotable_entity_ids: list[str],
) -> StatementLambdaElement:
"""Generate a CTE to find the entity and device context ids and a query to find linked row."""
entities_cte = _select_entities_context_ids_sub_query(
start_day,
end_day,
event_types,
entity_ids,
json_quotable_entity_ids,
).cte()
return query.union_all(
states_query_for_entity_ids(start_day, end_day, entity_ids),
select_events_context_only().where(Events.context_id.in_(entities_cte)),
select_states_context_only()
.where(States.entity_id.not_in(entity_ids))
.where(States.context_id.in_(entities_cte)),
)
def entities_stmt(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str],
json_quotable_entity_ids: list[str],
) -> StatementLambdaElement:
"""Generate a logbook query for multiple entities."""
assert json_quotable_entity_ids is not None
return lambda_stmt(
lambda: _apply_entities_context_union(
select_events_without_states(start_day, end_day, event_types).where(
apply_event_entity_id_matchers(json_quotable_entity_ids)
),
start_day,
end_day,
event_types,
entity_ids,
json_quotable_entity_ids,
).order_by(Events.time_fired)
)
def states_query_for_entity_ids(
start_day: dt, end_day: dt, entity_ids: list[str]
) -> Query:
"""Generate a select for states from the States table for specific entities."""
return apply_states_filters(
apply_entities_hints(select_states()), start_day, end_day
).where(States.entity_id.in_(entity_ids))
def apply_event_entity_id_matchers(
json_quotable_entity_ids: Iterable[str],
) -> sqlalchemy.or_:
"""Create matchers for the entity_id in the event_data."""
return ENTITY_ID_IN_EVENT.in_(
json_quotable_entity_ids
) | OLD_ENTITY_ID_IN_EVENT.in_(json_quotable_entity_ids)
def apply_entities_hints(query: Query) -> Query:
"""Force mysql to use the right index on large selects."""
return query.with_hint(
States, f"FORCE INDEX ({ENTITY_ID_LAST_UPDATED_INDEX})", dialect_name="mysql"
)
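For illustration, the entity matcher above covers both storage formats (the entity id is hypothetical):

matcher = apply_event_entity_id_matchers(["light.kitchen"])
# roughly: EVENT_DATA_JSON["entity_id"].in_([...]) matches rows whose payload
# lives in the new EventData.shared_data column, while OLD_ENTITY_ID_IN_EVENT
# catches rows that still carry the payload in the legacy Events.event_data.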

homeassistant/components/logbook/queries/entities_and_devices.py (new)

@@ -0,0 +1,111 @@
"""Entities and Devices queries for logbook."""
from __future__ import annotations
from collections.abc import Iterable
from datetime import datetime as dt
import sqlalchemy
from sqlalchemy import lambda_stmt, select, union_all
from sqlalchemy.orm import Query
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import Select
from homeassistant.components.recorder.models import Events, States
from .common import (
select_events_context_id_subquery,
select_events_context_only,
select_events_without_states,
select_states_context_only,
)
from .devices import apply_event_device_id_matchers
from .entities import (
apply_entities_hints,
apply_event_entity_id_matchers,
states_query_for_entity_ids,
)
def _select_entities_device_id_context_ids_sub_query(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str],
json_quotable_entity_ids: list[str],
json_quotable_device_ids: list[str],
) -> Select:
"""Generate a subquery to find context ids for multiple entities and multiple devices."""
return select(
union_all(
select_events_context_id_subquery(start_day, end_day, event_types).where(
_apply_event_entity_id_device_id_matchers(
json_quotable_entity_ids, json_quotable_device_ids
)
),
apply_entities_hints(select(States.context_id))
.filter((States.last_updated > start_day) & (States.last_updated < end_day))
.where(States.entity_id.in_(entity_ids)),
).c.context_id
)
def _apply_entities_devices_context_union(
query: Query,
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str],
json_quotable_entity_ids: list[str],
json_quotable_device_ids: list[str],
) -> StatementLambdaElement:
devices_entities_cte = _select_entities_device_id_context_ids_sub_query(
start_day,
end_day,
event_types,
entity_ids,
json_quotable_entity_ids,
json_quotable_device_ids,
).cte()
return query.union_all(
states_query_for_entity_ids(start_day, end_day, entity_ids),
select_events_context_only().where(Events.context_id.in_(devices_entities_cte)),
select_states_context_only()
.where(States.entity_id.not_in(entity_ids))
.where(States.context_id.in_(devices_entities_cte)),
)
def entities_devices_stmt(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str],
json_quotable_entity_ids: list[str],
json_quotable_device_ids: list[str],
) -> StatementLambdaElement:
"""Generate a logbook query for multiple entities."""
stmt = lambda_stmt(
lambda: _apply_entities_devices_context_union(
select_events_without_states(start_day, end_day, event_types).where(
_apply_event_entity_id_device_id_matchers(
json_quotable_entity_ids, json_quotable_device_ids
)
),
start_day,
end_day,
event_types,
entity_ids,
json_quotable_entity_ids,
json_quotable_device_ids,
).order_by(Events.time_fired)
)
return stmt
def _apply_event_entity_id_device_id_matchers(
json_quotable_entity_ids: Iterable[str], json_quotable_device_ids: Iterable[str]
) -> sqlalchemy.or_:
"""Create matchers for the device_id and entity_id in the event_data."""
return apply_event_entity_id_matchers(
json_quotable_entity_ids
) | apply_event_device_id_matchers(json_quotable_device_ids)

homeassistant/components/recorder/models.py

@@ -105,6 +105,9 @@ class FAST_PYSQLITE_DATETIME(sqlite.DATETIME):  # type: ignore[misc]
 JSON_VARIENT_CAST = Text().with_variant(
     postgresql.JSON(none_as_null=True), "postgresql"
 )
+JSONB_VARIENT_CAST = Text().with_variant(
+    postgresql.JSONB(none_as_null=True), "postgresql"
+)
 DATETIME_TYPE = (
     DateTime(timezone=True)
     .with_variant(mysql.DATETIME(timezone=True, fsp=6), "mysql")
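For context, a sketch of how the new variant is consumed by the logbook queries (this line is from queries/common.py above, not part of this file): on PostgreSQL the cast yields native JSONB, elsewhere it falls back to Text.

EVENT_DATA_JSON = type_coerce(
    EventData.shared_data.cast(JSONB_VARIENT_CAST), JSONLiteral(none_as_null=True)
)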

tests/components/logbook/test_init.py

@@ -5,6 +5,7 @@ import collections
 from datetime import datetime, timedelta
 from http import HTTPStatus
 import json
+from typing import Callable
 from unittest.mock import Mock, patch
 
 import pytest
@@ -30,11 +31,13 @@ from homeassistant.const import (
     EVENT_HOMEASSISTANT_START,
     EVENT_HOMEASSISTANT_STARTED,
     EVENT_HOMEASSISTANT_STOP,
+    EVENT_LOGBOOK_ENTRY,
     STATE_OFF,
     STATE_ON,
 )
 import homeassistant.core as ha
-from homeassistant.helpers import entity_registry as er
+from homeassistant.core import Event, HomeAssistant
+from homeassistant.helpers import device_registry, entity_registry as er
 from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS
 from homeassistant.helpers.json import JSONEncoder
 from homeassistant.setup import async_setup_component
@@ -42,7 +45,7 @@ import homeassistant.util.dt as dt_util
 from .common import MockRow, mock_humanify
 
-from tests.common import async_capture_events, mock_platform
+from tests.common import MockConfigEntry, async_capture_events, mock_platform
 from tests.components.recorder.common import (
     async_recorder_block_till_done,
     async_wait_recording_done,
@@ -92,12 +95,15 @@ async def test_service_call_create_logbook_entry(hass_):
     # Our service call will unblock when the event listeners have been
     # scheduled. This means that they may not have been processed yet.
     await async_wait_recording_done(hass_)
+    ent_reg = er.async_get(hass_)
 
     events = list(
         logbook._get_events(
             hass_,
             dt_util.utcnow() - timedelta(hours=1),
             dt_util.utcnow() + timedelta(hours=1),
+            (EVENT_LOGBOOK_ENTRY,),
+            ent_reg,
         )
     )
     assert len(events) == 2
@@ -131,12 +137,15 @@ async def test_service_call_create_logbook_entry_invalid_entity_id(hass, recorde
         },
     )
     await async_wait_recording_done(hass)
+    ent_reg = er.async_get(hass)
 
     events = list(
         logbook._get_events(
             hass,
             dt_util.utcnow() - timedelta(hours=1),
             dt_util.utcnow() + timedelta(hours=1),
+            (EVENT_LOGBOOK_ENTRY,),
+            ent_reg,
         )
     )
     assert len(events) == 1
@@ -2431,3 +2440,270 @@ async def test_get_events_bad_end_time(hass, hass_ws_client, recorder_mock):
     response = await client.receive_json()
     assert not response["success"]
     assert response["error"]["code"] == "invalid_end_time"
async def test_get_events_with_device_ids(hass, hass_ws_client, recorder_mock):
"""Test logbook get_events for device ids."""
now = dt_util.utcnow()
await asyncio.gather(
*[
async_setup_component(hass, comp, {})
for comp in ("homeassistant", "logbook")
]
)
entry = MockConfigEntry(domain="test", data={"first": True}, options=None)
entry.add_to_hass(hass)
dev_reg = device_registry.async_get(hass)
device = dev_reg.async_get_or_create(
config_entry_id=entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
sw_version="sw-version",
name="device name",
manufacturer="manufacturer",
model="model",
suggested_area="Game Room",
)
class MockLogbookPlatform:
"""Mock a logbook platform."""
@ha.callback
def async_describe_events(
hass: HomeAssistant,
async_describe_event: Callable[
[str, str, Callable[[Event], dict[str, str]]], None
],
) -> None:
"""Describe logbook events."""
@ha.callback
def async_describe_test_event(event: Event) -> dict[str, str]:
"""Describe mock logbook event."""
return {
"name": "device name",
"message": "is on fire",
}
async_describe_event("test", "mock_event", async_describe_test_event)
await logbook._process_logbook_platform(hass, "test", MockLogbookPlatform)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire("mock_event", {"device_id": device.id})
hass.states.async_set("light.kitchen", STATE_OFF)
await hass.async_block_till_done()
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 100})
await hass.async_block_till_done()
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 200})
await hass.async_block_till_done()
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 300})
await hass.async_block_till_done()
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 400})
await hass.async_block_till_done()
context = ha.Context(
id="ac5bd62de45711eaaeb351041eec8dd9",
user_id="b400facee45711eaa9308bfd3d19e474",
)
hass.states.async_set("light.kitchen", STATE_OFF, context=context)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
client = await hass_ws_client()
await client.send_json(
{
"id": 1,
"type": "logbook/get_events",
"start_time": now.isoformat(),
"device_ids": [device.id],
}
)
response = await client.receive_json()
assert response["success"]
assert response["id"] == 1
results = response["result"]
assert len(results) == 1
assert results[0]["name"] == "device name"
assert results[0]["message"] == "is on fire"
assert isinstance(results[0]["when"], float)
await client.send_json(
{
"id": 2,
"type": "logbook/get_events",
"start_time": now.isoformat(),
"entity_ids": ["light.kitchen"],
"device_ids": [device.id],
}
)
response = await client.receive_json()
assert response["success"]
assert response["id"] == 2
results = response["result"]
assert results[0]["entity_id"] == "light.kitchen"
assert results[0]["state"] == "on"
assert results[1]["entity_id"] == "light.kitchen"
assert results[1]["state"] == "off"
await client.send_json(
{
"id": 3,
"type": "logbook/get_events",
"start_time": now.isoformat(),
}
)
response = await client.receive_json()
assert response["success"]
assert response["id"] == 3
results = response["result"]
assert len(results) == 4
assert results[0]["message"] == "started"
assert results[1]["name"] == "device name"
assert results[1]["message"] == "is on fire"
assert isinstance(results[1]["when"], float)
assert results[2]["entity_id"] == "light.kitchen"
assert results[2]["state"] == "on"
assert isinstance(results[2]["when"], float)
assert results[3]["entity_id"] == "light.kitchen"
assert results[3]["state"] == "off"
assert isinstance(results[3]["when"], float)
async def test_logbook_select_entities_context_id(hass, recorder_mock, hass_client):
"""Test the logbook view with end_time and entity with automations and scripts."""
await asyncio.gather(
*[
async_setup_component(hass, comp, {})
for comp in ("homeassistant", "logbook", "automation", "script")
]
)
await async_recorder_block_till_done(hass)
context = ha.Context(
id="ac5bd62de45711eaaeb351041eec8dd9",
user_id="b400facee45711eaa9308bfd3d19e474",
)
# An Automation
automation_entity_id_test = "automation.alarm"
hass.bus.async_fire(
EVENT_AUTOMATION_TRIGGERED,
{ATTR_NAME: "Mock automation", ATTR_ENTITY_ID: automation_entity_id_test},
context=context,
)
hass.bus.async_fire(
EVENT_SCRIPT_STARTED,
{ATTR_NAME: "Mock script", ATTR_ENTITY_ID: "script.mock_script"},
context=context,
)
hass.states.async_set(
automation_entity_id_test,
STATE_ON,
{ATTR_FRIENDLY_NAME: "Alarm Automation"},
context=context,
)
entity_id_test = "alarm_control_panel.area_001"
hass.states.async_set(entity_id_test, STATE_OFF, context=context)
await hass.async_block_till_done()
hass.states.async_set(entity_id_test, STATE_ON, context=context)
await hass.async_block_till_done()
entity_id_second = "alarm_control_panel.area_002"
hass.states.async_set(entity_id_second, STATE_OFF, context=context)
await hass.async_block_till_done()
hass.states.async_set(entity_id_second, STATE_ON, context=context)
await hass.async_block_till_done()
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
entity_id_third = "alarm_control_panel.area_003"
logbook.async_log_entry(
hass,
"mock_name",
"mock_message",
"alarm_control_panel",
entity_id_third,
context,
)
await hass.async_block_till_done()
logbook.async_log_entry(
hass,
"mock_name",
"mock_message",
"homeassistant",
None,
context,
)
await hass.async_block_till_done()
# A service call
light_turn_off_service_context = ha.Context(
id="9c5bd62de45711eaaeb351041eec8dd9",
user_id="9400facee45711eaa9308bfd3d19e474",
)
hass.states.async_set("light.switch", STATE_ON)
await hass.async_block_till_done()
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
ATTR_DOMAIN: "light",
ATTR_SERVICE: "turn_off",
ATTR_ENTITY_ID: "light.switch",
},
context=light_turn_off_service_context,
)
await hass.async_block_till_done()
hass.states.async_set(
"light.switch", STATE_OFF, context=light_turn_off_service_context
)
await async_wait_recording_done(hass)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity={entity_id_test},{entity_id_second},{entity_id_third},light.switch"
)
assert response.status == HTTPStatus.OK
json_dict = await response.json()
assert json_dict[0]["entity_id"] == entity_id_test
assert json_dict[0]["context_event_type"] == "automation_triggered"
assert json_dict[0]["context_entity_id"] == "automation.alarm"
assert json_dict[0]["context_entity_id_name"] == "Alarm Automation"
assert json_dict[0]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[1]["entity_id"] == entity_id_second
assert json_dict[1]["context_event_type"] == "automation_triggered"
assert json_dict[1]["context_entity_id"] == "automation.alarm"
assert json_dict[1]["context_entity_id_name"] == "Alarm Automation"
assert json_dict[1]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[2]["entity_id"] == "alarm_control_panel.area_003"
assert json_dict[2]["context_event_type"] == "automation_triggered"
assert json_dict[2]["context_entity_id"] == "automation.alarm"
assert json_dict[2]["domain"] == "alarm_control_panel"
assert json_dict[2]["context_entity_id_name"] == "Alarm Automation"
assert json_dict[2]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[3]["entity_id"] == "light.switch"
assert json_dict[3]["context_event_type"] == "call_service"
assert json_dict[3]["context_domain"] == "light"
assert json_dict[3]["context_service"] == "turn_off"
assert json_dict[3]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"