Move logbook continuous domain filtering to SQL (#37115)

* Move logbook continuous domain filtering to SQL

Sensors tend to generate a significant number of states that the
logbook then filters out. In testing, 75% of states could be filtered
away in SQL, avoiding the SQLAlchemy ORM overhead of creating objects
that would only be discarded (see the sketch after the change summary
below).

* Remove unneeded nesting
J. Nick Koston 2020-06-26 09:12:50 -05:00 committed by GitHub
parent fe1a7f6d69
commit 76db2b39b0
2 changed files with 48 additions and 23 deletions
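
For context, a minimal self-contained sketch of the technique, using a simplified stand-in for the recorder's States table (the schema and data here are illustrative, not Home Assistant's real model). The point is that contains() compiles to a LIKE clause, so the database discards continuous-sensor rows before the ORM ever builds objects for them:

import sqlalchemy
from sqlalchemy import Column, Integer, String, Text, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class States(Base):
    """Simplified stand-in for the recorder's States table."""

    __tablename__ = "states"
    state_id = Column(Integer, primary_key=True)
    domain = Column(String(64))
    entity_id = Column(String(255))
    attributes = Column(Text)  # state attributes serialized as JSON text


CONTINUOUS_DOMAINS = ["proximity", "sensor"]
UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":'

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all(
        [
            States(domain="sensor", entity_id="sensor.temp",
                   attributes='{"unit_of_measurement": "C"}'),
            States(domain="light", entity_id="light.bla", attributes="{}"),
        ]
    )
    session.commit()

    # Keep a row unless it is BOTH in a continuous domain AND carries a
    # unit_of_measurement attribute; the substring test runs inside the
    # database, so filtered rows are never materialized by the ORM.
    query = session.query(States).filter(
        sqlalchemy.not_(States.domain.in_(CONTINUOUS_DOMAINS))
        | sqlalchemy.not_(States.attributes.contains(UNIT_OF_MEASUREMENT_JSON))
    )
    print([row.entity_id for row in query])  # ['light.bla']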


@@ -5,6 +5,7 @@ import json
import logging
import time
import sqlalchemy
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import aliased
import voluptuous as vol
@@ -28,7 +29,6 @@ from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_NAME,
ATTR_UNIT_OF_MEASUREMENT,
CONF_EXCLUDE,
CONF_INCLUDE,
EVENT_HOMEASSISTANT_START,
@@ -66,6 +66,8 @@ DOMAIN = "logbook"
GROUP_BY_MINUTES = 15
EMPTY_JSON_OBJECT = "{}"
UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":'
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA}, extra=vol.ALLOW_EXTRA
)
@@ -414,6 +416,15 @@ def _get_events(hass, config, start_day, end_day, entity_id=None):
& (States.state != old_state.state)
)
)
#
# Prefilter out continuous domains that have
# ATTR_UNIT_OF_MEASUREMENT as it's much faster in SQL.
#
.filter(
(Events.event_type != EVENT_STATE_CHANGED)
| sqlalchemy.not_(States.domain.in_(CONTINUOUS_DOMAINS))
| sqlalchemy.not_(States.attributes.contains(UNIT_OF_MEASUREMENT_JSON))
)
.filter(
Events.event_type.in_(ALL_EVENT_TYPES + list(hass.data.get(DOMAIN, {})))
)
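
For reference, the shape of the new predicate can be inspected in isolation. A sketch using lightweight table()/column() stand-ins (the column names mirror the query above; the domain list is illustrative) that prints the SQL contains() generates:

import sqlalchemy
from sqlalchemy import column, table
from sqlalchemy.dialects import sqlite

# Lightweight stand-ins for the recorder columns used in the filter.
states = table("states", column("domain"), column("attributes"))

expr = sqlalchemy.not_(states.c.domain.in_(["proximity", "sensor"])) | sqlalchemy.not_(
    states.c.attributes.contains('"unit_of_measurement":')
)
print(expr.compile(dialect=sqlite.dialect(), compile_kwargs={"literal_binds": True}))
# Prints roughly:
#   (states.domain NOT IN ('proximity', 'sensor'))
#   OR states.attributes NOT LIKE '%' || '"unit_of_measurement":' || '%'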
@@ -449,12 +460,6 @@ def _keep_event(hass, event, entities_filter, entity_attr_cache):
# Do not report on entity removal
if not event.has_old_and_new_state:
return False
if event.domain in CONTINUOUS_DOMAINS and entity_attr_cache.get(
entity_id, ATTR_UNIT_OF_MEASUREMENT, event
):
# Don't show continuous sensor value changes in the logbook
return False
elif event.event_type in HOMEASSISTANT_EVENTS:
entity_id = f"{HA_DOMAIN}."
elif event.event_type in hass.data[DOMAIN] and ATTR_ENTITY_ID not in event.data:
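
For contrast, the branch removed above performed the same test per event in Python, after the ORM had already materialized each row. A simplified stand-alone equivalent (the helper name and signature are hypothetical, condensed from the deleted lines):

import json

CONTINUOUS_DOMAINS = ["proximity", "sensor"]


def is_continuous_sensor_event(domain, attributes_json):
    """True means a continuous sensor value change the logbook drops."""
    if domain not in CONTINUOUS_DOMAINS:
        return False
    # Deserializing attributes per row is the per-event cost the commit avoids.
    attributes = json.loads(attributes_json or "{}")
    return bool(attributes.get("unit_of_measurement"))


assert is_continuous_sensor_event("sensor", '{"unit_of_measurement": "C"}')
assert not is_continuous_sensor_event("light", '{"unit_of_measurement": "foo"}')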


@@ -149,22 +149,6 @@ class TestComponentLogbook(unittest.TestCase):
entries[1], pointC, "bla", domain="sensor", entity_id=entity_id
)
def test_filter_continuous_sensor_values(self):
"""Test remove continuous sensor events from logbook."""
entity_id = "sensor.bla"
pointA = dt_util.utcnow()
entity_attr_cache = logbook.EntityAttributeCache(self.hass)
attributes = {"unit_of_measurement": "foo"}
eventA = self.create_state_changed_event(pointA, entity_id, 10, attributes)
entities_filter = convert_include_exclude_filter(
logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})[logbook.DOMAIN]
)
assert (
logbook._keep_event(self.hass, eventA, entities_filter, entity_attr_cache)
is False
)
def test_exclude_new_entities(self):
"""Test if events are excluded on first update."""
entity_id = "sensor.bla"
@@ -1806,6 +1790,42 @@ async def test_logbook_entity_filter_with_automations(hass, hass_client):
assert json_dict[0]["entity_id"] == entity_id_second
async def test_filter_continuous_sensor_values(hass, hass_client):
"""Test remove continuous sensor events from logbook."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id_test = "switch.test"
hass.states.async_set(entity_id_test, STATE_OFF)
hass.states.async_set(entity_id_test, STATE_ON)
entity_id_second = "sensor.bla"
hass.states.async_set(entity_id_second, STATE_OFF, {"unit_of_measurement": "foo"})
hass.states.async_set(entity_id_second, STATE_ON, {"unit_of_measurement": "foo"})
entity_id_third = "light.bla"
hass.states.async_set(entity_id_third, STATE_OFF, {"unit_of_measurement": "foo"})
hass.states.async_set(entity_id_third, STATE_ON, {"unit_of_measurement": "foo"})
await hass.async_add_job(partial(trigger_db_commit, hass))
await hass.async_block_till_done()
await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id_test
assert response_json[1]["entity_id"] == entity_id_third
class MockLazyEventPartialState(ha.Event):
"""Minimal mock of a Lazy event."""