Mirror of https://github.com/home-assistant/core.git
Add json decode caching to logbook (#71080)
This commit is contained in:
parent 9b10658d01
commit b8442d9340
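The heart of this change: _get_events() now creates one event_data_cache dict per logbook query and hands it to every LazyEventPartialState, so rows carrying identical serialized event payloads go through json.loads only once. A minimal standalone sketch of the pattern (illustrative names, not code from the commit):

    import json
    from typing import Any


    def decode_cached(source: str, cache: dict[str, dict[str, Any]]) -> dict[str, Any]:
        """Decode a JSON object string, reusing earlier results for repeated inputs."""
        if (hit := cache.get(source)) is not None:
            return hit
        decoded: dict[str, Any] = json.loads(source)
        cache[source] = decoded
        return decoded


    cache: dict[str, dict[str, Any]] = {}
    first = decode_cached('{"entity_id": "light.kitchen"}', cache)
    second = decode_cached('{"entity_id": "light.kitchen"}', cache)
    assert first is second  # second call is a dict lookup, not a decode

Keying on the raw string works because the recorder stores event data as text, and repeated events, the common case in a logbook window, share both the key and the single decoded dict.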
@@ -139,6 +139,7 @@ homeassistant.components.lcn.*
 homeassistant.components.light.*
 homeassistant.components.local_ip.*
 homeassistant.components.lock.*
+homeassistant.components.logbook.*
 homeassistant.components.lookin.*
 homeassistant.components.luftdaten.*
 homeassistant.components.mailbox.*
@@ -1,16 +1,18 @@
 """Event parser and human readable log generator."""
 from __future__ import annotations
 
-from collections.abc import Iterable
+from collections.abc import Callable, Generator, Iterable
 from contextlib import suppress
 from datetime import datetime as dt, timedelta
 from http import HTTPStatus
 from itertools import groupby
 import json
 import re
-from typing import Any
+from typing import Any, cast
 
+from aiohttp import web
 import sqlalchemy
+from sqlalchemy.engine.row import Row
 from sqlalchemy.orm import aliased
 from sqlalchemy.orm.query import Query
 from sqlalchemy.orm.session import Session
@@ -19,7 +21,10 @@ import voluptuous as vol
 
 from homeassistant.components import frontend
 from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED
-from homeassistant.components.history import sqlalchemy_filter_from_include_exclude_conf
+from homeassistant.components.history import (
+    Filters,
+    sqlalchemy_filter_from_include_exclude_conf,
+)
 from homeassistant.components.http import HomeAssistantView
 from homeassistant.components.recorder import get_instance
 from homeassistant.components.recorder.models import (
@@ -34,7 +39,6 @@ from homeassistant.const import (
     ATTR_DOMAIN,
     ATTR_ENTITY_ID,
     ATTR_FRIENDLY_NAME,
-    ATTR_ICON,
     ATTR_NAME,
     ATTR_SERVICE,
     EVENT_CALL_SERVICE,
@@ -45,6 +49,8 @@ from homeassistant.const import (
 )
 from homeassistant.core import (
     DOMAIN as HA_DOMAIN,
+    Context,
+    Event,
     HomeAssistant,
     ServiceCall,
     callback,
@@ -54,6 +60,7 @@ from homeassistant.exceptions import InvalidEntityFormatError
 import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.entityfilter import (
     INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA,
+    EntityFilter,
     convert_include_exclude_filter,
     generate_filter,
 )
@@ -65,6 +72,7 @@ from homeassistant.loader import bind_hass
 import homeassistant.util.dt as dt_util
 
 ENTITY_ID_JSON_TEMPLATE = '%"entity_id":"{}"%'
+FRIENDLY_NAME_JSON_EXTRACT = re.compile('"friendly_name": ?"([^"]+)"')
 ENTITY_ID_JSON_EXTRACT = re.compile('"entity_id": ?"([^"]+)"')
 DOMAIN_JSON_EXTRACT = re.compile('"domain": ?"([^"]+)"')
 ICON_JSON_EXTRACT = re.compile('"icon": ?"([^"]+)"')
@@ -111,7 +119,7 @@ EVENT_COLUMNS = [
     Events.context_parent_id,
 ]
 
-SCRIPT_AUTOMATION_EVENTS = [EVENT_AUTOMATION_TRIGGERED, EVENT_SCRIPT_STARTED]
+SCRIPT_AUTOMATION_EVENTS = {EVENT_AUTOMATION_TRIGGERED, EVENT_SCRIPT_STARTED}
 
 LOG_MESSAGE_SCHEMA = vol.Schema(
     {
@@ -124,14 +132,28 @@ LOG_MESSAGE_SCHEMA = vol.Schema(
 
 
 @bind_hass
-def log_entry(hass, name, message, domain=None, entity_id=None, context=None):
+def log_entry(
+    hass: HomeAssistant,
+    name: str,
+    message: str,
+    domain: str | None = None,
+    entity_id: str | None = None,
+    context: Context | None = None,
+) -> None:
     """Add an entry to the logbook."""
     hass.add_job(async_log_entry, hass, name, message, domain, entity_id, context)
 
 
 @callback
 @bind_hass
-def async_log_entry(hass, name, message, domain=None, entity_id=None, context=None):
+def async_log_entry(
+    hass: HomeAssistant,
+    name: str,
+    message: str,
+    domain: str | None = None,
+    entity_id: str | None = None,
+    context: Context | None = None,
+) -> None:
     """Add an entry to the logbook."""
     data = {ATTR_NAME: name, ATTR_MESSAGE: message}
 
@@ -184,11 +206,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     return True
 
 
-async def _process_logbook_platform(hass, domain, platform):
+async def _process_logbook_platform(
+    hass: HomeAssistant, domain: str, platform: Any
+) -> None:
     """Process a logbook platform."""
 
     @callback
-    def _async_describe_event(domain, event_name, describe_callback):
+    def _async_describe_event(
+        domain: str,
+        event_name: str,
+        describe_callback: Callable[[Event], dict[str, Any]],
+    ) -> None:
         """Teach logbook how to describe a new event."""
         hass.data[DOMAIN][event_name] = (domain, describe_callback)
 
@@ -202,41 +230,51 @@ class LogbookView(HomeAssistantView):
     name = "api:logbook"
     extra_urls = ["/api/logbook/{datetime}"]
 
-    def __init__(self, config, filters, entities_filter):
+    def __init__(
+        self,
+        config: dict[str, Any],
+        filters: Filters | None,
+        entities_filter: EntityFilter | None,
+    ) -> None:
         """Initialize the logbook view."""
         self.config = config
         self.filters = filters
         self.entities_filter = entities_filter
 
-    async def get(self, request, datetime=None):
+    async def get(
+        self, request: web.Request, datetime: str | None = None
+    ) -> web.Response:
         """Retrieve logbook entries."""
         if datetime:
-            if (datetime := dt_util.parse_datetime(datetime)) is None:
+            if (datetime_dt := dt_util.parse_datetime(datetime)) is None:
                 return self.json_message("Invalid datetime", HTTPStatus.BAD_REQUEST)
         else:
-            datetime = dt_util.start_of_local_day()
+            datetime_dt = dt_util.start_of_local_day()
 
-        if (period := request.query.get("period")) is None:
-            period = 1
+        if (period_str := request.query.get("period")) is None:
+            period: int = 1
         else:
-            period = int(period)
+            period = int(period_str)
 
-        if entity_ids := request.query.get("entity"):
+        if entity_ids_str := request.query.get("entity"):
             try:
-                entity_ids = cv.entity_ids(entity_ids)
+                entity_ids = cv.entity_ids(entity_ids_str)
             except vol.Invalid:
                 raise InvalidEntityFormatError(
-                    f"Invalid entity id(s) encountered: {entity_ids}. "
+                    f"Invalid entity id(s) encountered: {entity_ids_str}. "
                     "Format should be <domain>.<object_id>"
                 ) from vol.Invalid
+        else:
+            entity_ids = None
 
-        if (end_time := request.query.get("end_time")) is None:
-            start_day = dt_util.as_utc(datetime) - timedelta(days=period - 1)
+        if (end_time_str := request.query.get("end_time")) is None:
+            start_day = dt_util.as_utc(datetime_dt) - timedelta(days=period - 1)
             end_day = start_day + timedelta(days=period)
         else:
-            start_day = datetime
-            if (end_day := dt_util.parse_datetime(end_time)) is None:
+            start_day = datetime_dt
+            if (end_day_dt := dt_util.parse_datetime(end_time_str)) is None:
                 return self.json_message("Invalid end_time", HTTPStatus.BAD_REQUEST)
+            end_day = end_day_dt
 
         hass = request.app["hass"]
 
@@ -248,7 +286,7 @@ class LogbookView(HomeAssistantView):
                 "Can't combine entity with context_id", HTTPStatus.BAD_REQUEST
             )
 
-        def json_events():
+        def json_events() -> web.Response:
            """Fetch events and generate JSON."""
            return self.json(
                _get_events(
@@ -263,10 +301,17 @@ class LogbookView(HomeAssistantView):
                 )
             )
 
-        return await get_instance(hass).async_add_executor_job(json_events)
+        return cast(
+            web.Response, await get_instance(hass).async_add_executor_job(json_events)
+        )
 
 
-def humanify(hass, events, entity_attr_cache, context_lookup):
+def humanify(
+    hass: HomeAssistant,
+    events: Generator[LazyEventPartialState, None, None],
+    entity_attr_cache: EntityAttributeCache,
+    context_lookup: dict[str | None, LazyEventPartialState | None],
+) -> Generator[dict[str, Any], None, None]:
     """Generate a converted list of events into Entry objects.
 
     Will try to group events if possible:
@@ -320,6 +365,7 @@ def humanify(hass, events, entity_attr_cache, context_lookup):
                     # Skip all but the last sensor state
                     continue
 
+                assert entity_id is not None
                 data = {
                     "when": event.time_fired_isoformat,
                     "name": _entity_name_from_event(
@@ -420,27 +466,28 @@ def humanify(hass, events, entity_attr_cache, context_lookup):
 
 
 def _get_events(
-    hass,
-    start_day,
-    end_day,
-    entity_ids=None,
-    filters=None,
-    entities_filter=None,
-    entity_matches_only=False,
-    context_id=None,
-):
+    hass: HomeAssistant,
+    start_day: dt,
+    end_day: dt,
+    entity_ids: list[str] | None = None,
+    filters: Filters | None = None,
+    entities_filter: EntityFilter | Callable[[str], bool] | None = None,
+    entity_matches_only: bool = False,
+    context_id: str | None = None,
+) -> list[dict[str, Any]]:
     """Get events for a period of time."""
     assert not (
         entity_ids and context_id
     ), "can't pass in both entity_ids and context_id"
 
     entity_attr_cache = EntityAttributeCache(hass)
-    context_lookup = {None: None}
+    event_data_cache: dict[str, dict[str, Any]] = {}
+    context_lookup: dict[str | None, LazyEventPartialState | None] = {None: None}
 
-    def yield_events(query):
+    def yield_events(query: Query) -> Generator[LazyEventPartialState, None, None]:
         """Yield Events that are not filtered away."""
         for row in query.yield_per(1000):
-            event = LazyEventPartialState(row)
+            event = LazyEventPartialState(row, event_data_cache)
             context_lookup.setdefault(event.context_id, event)
             if event.event_type == EVENT_CALL_SERVICE:
                 continue
@@ -482,7 +529,7 @@ def _get_events(
         )
         if filters:
             query = query.filter(
-                filters.entity_filter() | (Events.event_type != EVENT_STATE_CHANGED)
+                filters.entity_filter() | (Events.event_type != EVENT_STATE_CHANGED)  # type: ignore[no-untyped-call]
             )
 
         if context_id is not None:
@@ -634,7 +681,11 @@ def _apply_event_entity_id_matchers(
     )
 
 
-def _keep_event(hass, event, entities_filter):
+def _keep_event(
+    hass: HomeAssistant,
+    event: LazyEventPartialState,
+    entities_filter: EntityFilter | Callable[[str], bool] | None = None,
+) -> bool:
     if event.event_type in HOMEASSISTANT_EVENTS:
         return entities_filter is None or entities_filter(HA_DOMAIN_ENTITY_ID)
 
@@ -648,15 +699,21 @@ def _keep_event(hass, event, entities_filter):
     else:
         domain = event.data_domain
 
-    if domain is None:
-        return False
-
-    return entities_filter is None or entities_filter(f"{domain}._")
+    return domain is not None and (
+        entities_filter is None or entities_filter(f"{domain}._")
+    )
 
 
 def _augment_data_with_context(
-    data, entity_id, event, context_lookup, entity_attr_cache, external_events
-):
+    data: dict[str, Any],
+    entity_id: str | None,
+    event: LazyEventPartialState,
+    context_lookup: dict[str | None, LazyEventPartialState | None],
+    entity_attr_cache: EntityAttributeCache,
+    external_events: dict[
+        str, tuple[str, Callable[[LazyEventPartialState], dict[str, Any]]]
+    ],
+) -> None:
     if not (context_event := context_lookup.get(event.context_id)):
         return
 
@@ -691,18 +748,14 @@ def _augment_data_with_context(
         data["context_event_type"] = event_type
         return
 
-    if not entity_id:
+    if not entity_id or context_event == event:
         return
 
-    attr_entity_id = event_data.get(ATTR_ENTITY_ID)
-    if not isinstance(attr_entity_id, str) or (
+    if (attr_entity_id := context_event.data_entity_id) is None or (
         event_type in SCRIPT_AUTOMATION_EVENTS and attr_entity_id == entity_id
     ):
         return
 
-    if context_event == event:
-        return
-
     data["context_entity_id"] = attr_entity_id
     data["context_entity_id_name"] = _entity_name_from_event(
         attr_entity_id, context_event, entity_attr_cache
@@ -716,7 +769,11 @@ def _augment_data_with_context(
         data["context_name"] = name
 
 
-def _entity_name_from_event(entity_id, event, entity_attr_cache):
+def _entity_name_from_event(
+    entity_id: str,
+    event: LazyEventPartialState,
+    entity_attr_cache: EntityAttributeCache,
+) -> str:
     """Extract the entity name from the event using the cache if possible."""
     return entity_attr_cache.get(
         entity_id, ATTR_FRIENDLY_NAME, event
@@ -739,85 +796,87 @@ class LazyEventPartialState:
         "context_user_id",
         "context_parent_id",
         "time_fired_minute",
+        "_event_data_cache",
     ]
 
-    def __init__(self, row):
+    def __init__(
+        self,
+        row: Row,
+        event_data_cache: dict[str, dict[str, Any]],
+    ) -> None:
         """Init the lazy event."""
         self._row = row
-        self._event_data = None
-        self._time_fired_isoformat = None
-        self._attributes = None
-        self._domain = None
-        self.event_type = self._row.event_type
-        self.entity_id = self._row.entity_id
+        self._event_data: dict[str, Any] | None = None
+        self._time_fired_isoformat: dt | None = None
+        self._domain: str | None = None
+        self.event_type: str = self._row.event_type
+        self.entity_id: str | None = self._row.entity_id
         self.state = self._row.state
-        self.context_id = self._row.context_id
-        self.context_user_id = self._row.context_user_id
-        self.context_parent_id = self._row.context_parent_id
-        self.time_fired_minute = self._row.time_fired.minute
+        self.context_id: str | None = self._row.context_id
+        self.context_user_id: str | None = self._row.context_user_id
+        self.context_parent_id: str | None = self._row.context_parent_id
+        self.time_fired_minute: int = self._row.time_fired.minute
+        self._event_data_cache = event_data_cache
 
     @property
-    def domain(self):
+    def domain(self) -> str | None:
         """Return the domain for the state."""
         if self._domain is None:
+            assert self.entity_id is not None
            self._domain = split_entity_id(self.entity_id)[0]
         return self._domain
 
     @property
-    def attributes_icon(self):
+    def attributes_icon(self) -> str | None:
         """Extract the icon from the decoded attributes or json."""
-        if self._attributes:
-            return self._attributes.get(ATTR_ICON)
         result = ICON_JSON_EXTRACT.search(
-            self._row.shared_attrs or self._row.attributes
+            self._row.shared_attrs or self._row.attributes or ""
         )
-        return result and result.group(1)
+        return result.group(1) if result else None
 
     @property
-    def data_entity_id(self):
+    def data_entity_id(self) -> str | None:
         """Extract the entity id from the decoded data or json."""
         if self._event_data:
             return self._event_data.get(ATTR_ENTITY_ID)
 
         result = ENTITY_ID_JSON_EXTRACT.search(self._row.event_data)
-        return result and result.group(1)
+        return result.group(1) if result else None
 
     @property
-    def data_domain(self):
+    def attributes_friendly_name(self) -> str | None:
+        """Extract the friendly name from the decoded attributes or json."""
+        result = FRIENDLY_NAME_JSON_EXTRACT.search(
+            self._row.shared_attrs or self._row.attributes or ""
+        )
+        return result.group(1) if result else None
+
+    @property
+    def data_domain(self) -> str | None:
         """Extract the domain from the decoded data or json."""
-        if self._event_data:
-            return self._event_data.get(ATTR_DOMAIN)
-
         result = DOMAIN_JSON_EXTRACT.search(self._row.event_data)
-        return result and result.group(1)
+        return result.group(1) if result else None
 
     @property
-    def attributes(self):
-        """State attributes."""
-        if self._attributes is None:
-            source = self._row.shared_attrs or self._row.attributes
-            if source == EMPTY_JSON_OBJECT or source is None:
-                self._attributes = {}
-            else:
-                self._attributes = json.loads(source)
-        return self._attributes
-
-    @property
-    def data(self):
+    def data(self) -> dict[str, Any]:
         """Event data."""
         if not self._event_data:
-            if self._row.event_data == EMPTY_JSON_OBJECT:
-                self._event_data = {}
+            source: str = self._row.event_data
+            if event_data := self._event_data_cache.get(source):
+                self._event_data = event_data
             else:
-                self._event_data = json.loads(self._row.event_data)
+                self._event_data = self._event_data_cache[source] = cast(
+                    dict[str, Any], json.loads(source)
+                )
         return self._event_data
 
     @property
-    def time_fired_isoformat(self):
+    def time_fired_isoformat(self) -> dt | None:
         """Time event was fired in utc isoformat."""
         if not self._time_fired_isoformat:
-            self._time_fired_isoformat = process_timestamp_to_utc_isoformat(
-                self._row.time_fired or dt_util.utcnow()
+            self._time_fired_isoformat = (
+                process_timestamp_to_utc_isoformat(self._row.time_fired)
+                or dt_util.utcnow()
             )
 
         return self._time_fired_isoformat
@@ -841,15 +900,19 @@ class EntityAttributeCache:
         if attribute in self._cache[entity_id]:
             return self._cache[entity_id][attribute]
         else:
-            self._cache[entity_id] = {}
+            cache = self._cache[entity_id] = {}
 
         if current_state := self._hass.states.get(entity_id):
             # Try the current state as its faster than decoding the
             # attributes
-            self._cache[entity_id][attribute] = current_state.attributes.get(attribute)
+            cache[attribute] = current_state.attributes.get(attribute)
         else:
             # If the entity has been removed, decode the attributes
             # instead
-            self._cache[entity_id][attribute] = event.attributes.get(attribute)
+            if attribute != ATTR_FRIENDLY_NAME:
+                raise ValueError(
+                    f"{attribute} is not supported by {self.__class__.__name__}"
+                )
+            cache[attribute] = event.attributes_friendly_name
 
-        return self._cache[entity_id][attribute]
+        return cache[attribute]
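Beyond the cache, the diff above leans on regex fast paths: the new attributes_friendly_name property (like the existing icon/entity_id/domain extractors) searches the raw attribute JSON for one key instead of decoding the whole document, and the generic attributes property is deleted outright. A sketch of that trade-off using the same FRIENDLY_NAME_JSON_EXTRACT pattern the commit introduces:

    from __future__ import annotations

    import json
    import re

    FRIENDLY_NAME_JSON_EXTRACT = re.compile('"friendly_name": ?"([^"]+)"')


    def friendly_name_fast(raw_attrs: str) -> str | None:
        """Pull a single key out of serialized attributes without a full decode."""
        result = FRIENDLY_NAME_JSON_EXTRACT.search(raw_attrs)
        return result.group(1) if result else None


    raw = json.dumps({"friendly_name": "Kitchen Light", "brightness": 255})
    assert friendly_name_fast(raw) == "Kitchen Light"
    assert friendly_name_fast("{}") is None

The regex beats a full json.loads on large attribute blobs, but it is not a parser: a value containing an escaped quote would be cut short at the escape. The logbook accepts that trade-off for display purposes.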
@@ -404,4 +404,4 @@ def _create_state_changed_event_from_old_new(
     # pylint: disable=import-outside-toplevel
     from homeassistant.components import logbook
 
-    return logbook.LazyEventPartialState(row)
+    return logbook.LazyEventPartialState(row, {})
mypy.ini
@@ -1292,6 +1292,17 @@ no_implicit_optional = true
 warn_return_any = true
 warn_unreachable = true
 
+[mypy-homeassistant.components.logbook.*]
+check_untyped_defs = true
+disallow_incomplete_defs = true
+disallow_subclassing_any = true
+disallow_untyped_calls = true
+disallow_untyped_decorators = true
+disallow_untyped_defs = true
+no_implicit_optional = true
+warn_return_any = true
+warn_unreachable = true
+
 [mypy-homeassistant.components.lookin.*]
 check_untyped_defs = true
 disallow_incomplete_defs = true
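These are the same strict flags the neighboring component sections use. warn_return_any is likely what motivates the cast(web.Response, ...) wrapped around the executor-job result in LogbookView.get above, since that helper returns Any as far as mypy can tell. A reduced illustration of the interaction (hypothetical names, not from the codebase):

    from typing import Any, cast


    def untyped_job() -> Any:  # stands in for the untyped executor-job call
        return {"status": "ok"}


    def typed_caller() -> dict[str, str]:
        # Under warn_return_any, `return untyped_job()` would be flagged;
        # cast() narrows Any to the declared return type at no runtime cost.
        return cast(dict[str, str], untyped_job())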
@@ -113,6 +113,35 @@ async def test_service_call_create_logbook_entry(hass_):
     assert last_call.data.get(logbook.ATTR_DOMAIN) == "logbook"
 
 
+async def test_service_call_create_logbook_entry_invalid_entity_id(hass, recorder_mock):
+    """Test if service call create log book entry with an invalid entity id."""
+    await async_setup_component(hass, "logbook", {})
+    await hass.async_block_till_done()
+    hass.bus.async_fire(
+        logbook.EVENT_LOGBOOK_ENTRY,
+        {
+            logbook.ATTR_NAME: "Alarm",
+            logbook.ATTR_MESSAGE: "is triggered",
+            logbook.ATTR_DOMAIN: "switch",
+            logbook.ATTR_ENTITY_ID: 1234,
+        },
+    )
+    await async_wait_recording_done(hass)
+
+    events = list(
+        logbook._get_events(
+            hass,
+            dt_util.utcnow() - timedelta(hours=1),
+            dt_util.utcnow() + timedelta(hours=1),
+        )
+    )
+    assert len(events) == 1
+    assert events[0][logbook.ATTR_DOMAIN] == "switch"
+    assert events[0][logbook.ATTR_NAME] == "Alarm"
+    assert events[0][logbook.ATTR_ENTITY_ID] == 1234
+    assert events[0][logbook.ATTR_MESSAGE] == "is triggered"
+
+
 async def test_service_call_create_log_book_entry_no_message(hass_):
     """Test if service call create log book entry without message."""
     calls = async_capture_events(hass_, logbook.EVENT_LOGBOOK_ENTRY)
@@ -175,6 +204,14 @@ def test_home_assistant_start_stop_grouped(hass_):
     )
 
 
+def test_unsupported_attributes_in_cache_throws(hass):
+    """Test unsupported attributes in cache."""
+    entity_attr_cache = logbook.EntityAttributeCache(hass)
+    event = MockLazyEventPartialState(EVENT_STATE_CHANGED)
+    with pytest.raises(ValueError):
+        entity_attr_cache.get("sensor.xyz", "not_supported", event)
+
+
 def test_home_assistant_start(hass_):
     """Test if HA start is not filtered or converted into a restart."""
     entity_id = "switch.bla"
@@ -295,7 +332,7 @@ def create_state_changed_event_from_old_new(
     row.context_parent_id = None
     row.old_state_id = old_state and 1
     row.state_id = new_state and 1
-    return logbook.LazyEventPartialState(row)
+    return logbook.LazyEventPartialState(row, {})
 
 
 async def test_logbook_view(hass, hass_client, recorder_mock):
@@ -307,6 +344,26 @@ async def test_logbook_view(hass, hass_client, recorder_mock):
     assert response.status == HTTPStatus.OK
 
 
+async def test_logbook_view_invalid_start_date_time(hass, hass_client, recorder_mock):
+    """Test the logbook view with an invalid date time."""
+    await async_setup_component(hass, "logbook", {})
+    await async_recorder_block_till_done(hass)
+    client = await hass_client()
+    response = await client.get("/api/logbook/INVALID")
+    assert response.status == HTTPStatus.BAD_REQUEST
+
+
+async def test_logbook_view_invalid_end_date_time(hass, hass_client, recorder_mock):
+    """Test the logbook view."""
+    await async_setup_component(hass, "logbook", {})
+    await async_recorder_block_till_done(hass)
+    client = await hass_client()
+    response = await client.get(
+        f"/api/logbook/{dt_util.utcnow().isoformat()}?end_time=INVALID"
+    )
+    assert response.status == HTTPStatus.BAD_REQUEST
+
+
 async def test_logbook_view_period_entity(hass, hass_client, recorder_mock, set_utc):
     """Test the logbook view with period and entity."""
     await async_setup_component(hass, "logbook", {})
@@ -1437,6 +1494,34 @@ async def test_icon_and_state(hass, hass_client, recorder_mock):
     assert response_json[2]["state"] == STATE_OFF
 
 
+async def test_fire_logbook_entries(hass, hass_client, recorder_mock):
+    """Test many logbook entry calls."""
+    await async_setup_component(hass, "logbook", {})
+    await async_recorder_block_till_done(hass)
+
+    for _ in range(10):
+        hass.bus.async_fire(
+            logbook.EVENT_LOGBOOK_ENTRY,
+            {
+                logbook.ATTR_NAME: "Alarm",
+                logbook.ATTR_MESSAGE: "is triggered",
+                logbook.ATTR_DOMAIN: "switch",
+                logbook.ATTR_ENTITY_ID: "sensor.xyz",
+            },
+        )
+        hass.bus.async_fire(
+            logbook.EVENT_LOGBOOK_ENTRY,
+            {},
+        )
+    await async_wait_recording_done(hass)
+
+    client = await hass_client()
+    response_json = await _async_fetch_logbook(client)
+
+    # The empty events should be skipped
+    assert len(response_json) == 10
+
+
 async def test_exclude_events_domain(hass, hass_client, recorder_mock):
     """Test if events are filtered if domain is excluded in config."""
     entity_id = "switch.bla"
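The new test_unsupported_attributes_in_cache_throws pins down a narrowing that falls out of deleting the generic attributes decoder: for entities no longer present in hass.states, EntityAttributeCache can only serve friendly_name, and anything else now raises. A reduced, self-contained sketch of the guard it exercises (illustrative names; the real cache consults hass.states first):

    from __future__ import annotations

    import re

    ATTR_FRIENDLY_NAME = "friendly_name"
    FRIENDLY_NAME_JSON_EXTRACT = re.compile('"friendly_name": ?"([^"]+)"')


    class AttributeCacheSketch:
        """Cache attribute lookups; removed entities fall back to regex extraction."""

        def __init__(self) -> None:
            self._cache: dict[str, dict[str, str | None]] = {}

        def get(self, entity_id: str, attribute: str, raw_attrs: str) -> str | None:
            """Return a cached attribute, extracting friendly_name on a miss."""
            cache = self._cache.setdefault(entity_id, {})
            if attribute in cache:
                return cache[attribute]
            if attribute != ATTR_FRIENDLY_NAME:
                # Only friendly_name has a regex fast path; anything else would
                # silently come back None, so fail loudly instead.
                raise ValueError(f"{attribute} is not supported")
            result = FRIENDLY_NAME_JSON_EXTRACT.search(raw_attrs)
            cache[attribute] = result.group(1) if result else None
            return cache[attribute]


    sketch = AttributeCacheSketch()
    assert sketch.get("light.k", "friendly_name", '{"friendly_name": "Kitchen"}') == "Kitchen"
    try:
        sketch.get("light.k", "icon", "{}")
    except ValueError:
        pass  # mirrors the pytest.raises(ValueError) in the new test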