Avoid duplicated database queries when fetching statistics (#52433)
Commit 11fd9d9525 (parent 7b940d2382)
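Summary (inferred from the diff below): the helper `_get_meta_data` is renamed to `_get_metadata`, and `statistics_during_period` and `get_last_statistics` now fetch the metadata mapping once per session scope and reuse it both to derive the `metadata_ids` filter for the statistics query and as the metadata passed to `_sorted_statistics_to_dict`, removing the second metadata lookup each call previously made. `get_last_statistics` now requires a `statistic_id`, and the recorder statistics tests are extended to cover both `statistics_during_period` and `get_last_statistics` with additional sensor fixtures.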
homeassistant/components/recorder/statistics.py

@@ -126,7 +126,7 @@ def compile_statistics(instance: Recorder, start: datetime.datetime) -> bool:
     return True
 
 
-def _get_meta_data(hass, session, statistic_ids, statistic_type):
+def _get_metadata(hass, session, statistic_ids, statistic_type):
     """Fetch meta data."""
 
     def _meta(metas, wanted_metadata_id):
@@ -166,18 +166,23 @@ def list_statistic_ids(hass, statistic_type=None):
     """Return statistic_ids and meta data."""
     units = hass.config.units
     with session_scope(hass=hass) as session:
-        meta_data = _get_meta_data(hass, session, None, statistic_type)
+        metadata = _get_metadata(hass, session, None, statistic_type)
 
-        for meta in meta_data.values():
+        for meta in metadata.values():
             unit = _configured_unit(meta["unit_of_measurement"], units)
             meta["unit_of_measurement"] = unit
 
-        return list(meta_data.values())
+        return list(metadata.values())
 
 
 def statistics_during_period(hass, start_time, end_time=None, statistic_ids=None):
     """Return states changes during UTC period start_time - end_time."""
+    metadata = None
     with session_scope(hass=hass) as session:
+        metadata = _get_metadata(hass, session, statistic_ids, None)
+        if not metadata:
+            return {}
 
         baked_query = hass.data[STATISTICS_BAKERY](
             lambda session: session.query(*QUERY_STATISTICS)
         )
@@ -192,10 +197,7 @@ def statistics_during_period(hass, start_time, end_time=None, statistic_ids=None):
             baked_query += lambda q: q.filter(
                 Statistics.metadata_id.in_(bindparam("metadata_ids"))
             )
-            statistic_ids = [statistic_id.lower() for statistic_id in statistic_ids]
-            metadata_ids = _get_metadata_ids(hass, session, statistic_ids)
-            if not metadata_ids:
-                return {}
+            metadata_ids = list(metadata.keys())
 
         baked_query += lambda q: q.order_by(Statistics.metadata_id, Statistics.start)
 
@@ -204,24 +206,23 @@ def statistics_during_period(hass, start_time, end_time=None, statistic_ids=None):
                 start_time=start_time, end_time=end_time, metadata_ids=metadata_ids
             )
         )
-        meta_data = _get_meta_data(hass, session, statistic_ids, None)
-        return _sorted_statistics_to_dict(hass, stats, statistic_ids, meta_data)
+        return _sorted_statistics_to_dict(hass, stats, statistic_ids, metadata)
 
 
-def get_last_statistics(hass, number_of_stats, statistic_id=None):
-    """Return the last number_of_stats statistics."""
+def get_last_statistics(hass, number_of_stats, statistic_id):
+    """Return the last number_of_stats statistics for a statistic_id."""
+    statistic_ids = [statistic_id]
     with session_scope(hass=hass) as session:
+        metadata = _get_metadata(hass, session, statistic_ids, None)
+        if not metadata:
+            return {}
 
         baked_query = hass.data[STATISTICS_BAKERY](
             lambda session: session.query(*QUERY_STATISTICS)
         )
 
-        metadata_id = None
-        if statistic_id is not None:
-            baked_query += lambda q: q.filter_by(metadata_id=bindparam("metadata_id"))
-            metadata_ids = _get_metadata_ids(hass, session, [statistic_id])
-            if not metadata_ids:
-                return {}
-            metadata_id = metadata_ids[0]
+        baked_query += lambda q: q.filter_by(metadata_id=bindparam("metadata_id"))
+        metadata_id = next(iter(metadata.keys()))
 
         baked_query += lambda q: q.order_by(
             Statistics.metadata_id, Statistics.start.desc()
@@ -235,16 +236,14 @@ def get_last_statistics(hass, number_of_stats, statistic_id=None):
             )
         )
 
-        statistic_ids = [statistic_id] if statistic_id is not None else None
-        meta_data = _get_meta_data(hass, session, statistic_ids, None)
-        return _sorted_statistics_to_dict(hass, stats, statistic_ids, meta_data)
+        return _sorted_statistics_to_dict(hass, stats, statistic_ids, metadata)
 
 
 def _sorted_statistics_to_dict(
     hass,
     stats,
     statistic_ids,
-    meta_data,
+    metadata,
 ):
     """Convert SQL results into JSON friendly data structure."""
     result = defaultdict(list)
@@ -260,8 +259,8 @@ def _sorted_statistics_to_dict(
 
     # Append all statistic entries, and do unit conversion
     for meta_id, group in groupby(stats, lambda state: state.metadata_id):
-        unit = meta_data[meta_id]["unit_of_measurement"]
-        statistic_id = meta_data[meta_id]["statistic_id"]
+        unit = metadata[meta_id]["unit_of_measurement"]
+        statistic_id = metadata[meta_id]["statistic_id"]
         convert = UNIT_CONVERSIONS.get(unit, lambda x, units: x)
         ent_results = result[meta_id]
         ent_results.extend(
@@ -279,4 +278,4 @@ def _sorted_statistics_to_dict(
         )
 
     # Filter out the empty lists if some states had 0 results.
-    return {meta_data[key]["statistic_id"]: val for key, val in result.items() if val}
+    return {metadata[key]["statistic_id"]: val for key, val in result.items() if val}
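The core of the change above is a single metadata lookup per call, reused for both filtering and labeling. The snippet below is a standalone illustration of that pattern with hypothetical data (plain Python, not Home Assistant code); only the mapping shape, metadata_id mapped to a dict with "statistic_id" and "unit_of_measurement", is taken from the diff.

```python
# Standalone sketch of the "fetch metadata once, reuse it twice" pattern.
# Hypothetical data; shape taken from statistics.py in the diff above.
metadata = {
    1: {"statistic_id": "sensor.test1", "unit_of_measurement": "°C"},
    2: {"statistic_id": "sensor.test2", "unit_of_measurement": "%"},
}

# The one lookup supplies the filter values for the statistics query ...
metadata_ids = list(metadata)

# ... and the same mapping labels the rows afterwards, so no second
# metadata query is needed. Pretend query result: (metadata_id, mean).
rows = [(1, 15.0), (2, 48.0)]
result = {
    metadata[meta_id]["statistic_id"]: {
        "mean": mean,
        "unit_of_measurement": metadata[meta_id]["unit_of_measurement"],
    }
    for meta_id, mean in rows
}
print(result)  # {'sensor.test1': {...}, 'sensor.test2': {...}}
```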
tests/components/recorder/test_statistics.py

@@ -8,7 +8,10 @@ from pytest import approx
 from homeassistant.components.recorder import history
 from homeassistant.components.recorder.const import DATA_INSTANCE
 from homeassistant.components.recorder.models import process_timestamp_to_utc_isoformat
-from homeassistant.components.recorder.statistics import statistics_during_period
+from homeassistant.components.recorder.statistics import (
+    get_last_statistics,
+    statistics_during_period,
+)
 from homeassistant.const import TEMP_CELSIUS
 from homeassistant.setup import setup_component
 import homeassistant.util.dt as dt_util
@@ -25,24 +28,69 @@ def test_compile_hourly_statistics(hass_recorder):
     hist = history.get_significant_states(hass, zero, four)
     assert dict(states) == dict(hist)
 
-    recorder.do_adhoc_statistics(period="hourly", start=zero)
-    wait_recording_done(hass)
     for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
         stats = statistics_during_period(hass, zero, **kwargs)
-        assert stats == {
-            "sensor.test1": [
-                {
-                    "statistic_id": "sensor.test1",
-                    "start": process_timestamp_to_utc_isoformat(zero),
-                    "mean": approx(14.915254237288135),
-                    "min": approx(10.0),
-                    "max": approx(20.0),
-                    "last_reset": None,
-                    "state": None,
-                    "sum": None,
-                }
-            ]
-        }
+        assert stats == {}
+    stats = get_last_statistics(hass, 0, "sensor.test1")
+    assert stats == {}
+
+    recorder.do_adhoc_statistics(period="hourly", start=zero)
+    recorder.do_adhoc_statistics(period="hourly", start=four)
+    wait_recording_done(hass)
+    expected_1 = {
+        "statistic_id": "sensor.test1",
+        "start": process_timestamp_to_utc_isoformat(zero),
+        "mean": approx(14.915254237288135),
+        "min": approx(10.0),
+        "max": approx(20.0),
+        "last_reset": None,
+        "state": None,
+        "sum": None,
+    }
+    expected_2 = {
+        "statistic_id": "sensor.test1",
+        "start": process_timestamp_to_utc_isoformat(four),
+        "mean": approx(20.0),
+        "min": approx(20.0),
+        "max": approx(20.0),
+        "last_reset": None,
+        "state": None,
+        "sum": None,
+    }
+    expected_stats1 = [
+        {**expected_1, "statistic_id": "sensor.test1"},
+        {**expected_2, "statistic_id": "sensor.test1"},
+    ]
+    expected_stats2 = [
+        {**expected_1, "statistic_id": "sensor.test2"},
+        {**expected_2, "statistic_id": "sensor.test2"},
+    ]
+
+    # Test statistics_during_period
+    stats = statistics_during_period(hass, zero)
+    assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
+
+    stats = statistics_during_period(hass, zero, statistic_ids=["sensor.test2"])
+    assert stats == {"sensor.test2": expected_stats2}
+
+    stats = statistics_during_period(hass, zero, statistic_ids=["sensor.test3"])
+    assert stats == {}
+
+    # Test get_last_statistics
+    stats = get_last_statistics(hass, 0, "sensor.test1")
+    assert stats == {}
+
+    stats = get_last_statistics(hass, 1, "sensor.test1")
+    assert stats == {"sensor.test1": [{**expected_2, "statistic_id": "sensor.test1"}]}
+
+    stats = get_last_statistics(hass, 2, "sensor.test1")
+    assert stats == {"sensor.test1": expected_stats1[::-1]}
+
+    stats = get_last_statistics(hass, 3, "sensor.test1")
+    assert stats == {"sensor.test1": expected_stats1[::-1]}
+
+    stats = get_last_statistics(hass, 1, "sensor.test3")
+    assert stats == {}
 
 
 def record_states(hass):
@@ -54,13 +102,19 @@ def record_states(hass):
     sns1 = "sensor.test1"
     sns2 = "sensor.test2"
     sns3 = "sensor.test3"
+    sns4 = "sensor.test4"
     sns1_attr = {
         "device_class": "temperature",
         "state_class": "measurement",
         "unit_of_measurement": TEMP_CELSIUS,
     }
-    sns2_attr = {"device_class": "temperature"}
-    sns3_attr = {}
+    sns2_attr = {
+        "device_class": "humidity",
+        "state_class": "measurement",
+        "unit_of_measurement": "%",
+    }
+    sns3_attr = {"device_class": "temperature"}
+    sns4_attr = {}
 
     def set_state(entity_id, state, **kwargs):
         """Set the state."""
@@ -74,7 +128,7 @@ def record_states(hass):
     three = two + timedelta(minutes=30)
     four = three + timedelta(minutes=15)
 
-    states = {mp: [], sns1: [], sns2: [], sns3: []}
+    states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: []}
     with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=one):
         states[mp].append(
             set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)})
@@ -85,15 +139,18 @@ def record_states(hass):
         states[sns1].append(set_state(sns1, "10", attributes=sns1_attr))
         states[sns2].append(set_state(sns2, "10", attributes=sns2_attr))
         states[sns3].append(set_state(sns3, "10", attributes=sns3_attr))
+        states[sns4].append(set_state(sns4, "10", attributes=sns4_attr))
 
     with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=two):
         states[sns1].append(set_state(sns1, "15", attributes=sns1_attr))
         states[sns2].append(set_state(sns2, "15", attributes=sns2_attr))
         states[sns3].append(set_state(sns3, "15", attributes=sns3_attr))
+        states[sns4].append(set_state(sns4, "15", attributes=sns4_attr))
 
     with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=three):
         states[sns1].append(set_state(sns1, "20", attributes=sns1_attr))
         states[sns2].append(set_state(sns2, "20", attributes=sns2_attr))
         states[sns3].append(set_state(sns3, "20", attributes=sns3_attr))
+        states[sns4].append(set_state(sns4, "20", attributes=sns4_attr))
 
     return zero, four, states
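For reference, a minimal usage sketch of the two public helpers as changed here, assuming a running Home Assistant instance with the recorder set up; `hass` and `start_time` are placeholders, and the calls mirror the tests above.

```python
from datetime import timedelta

import homeassistant.util.dt as dt_util
from homeassistant.components.recorder.statistics import (
    get_last_statistics,
    statistics_during_period,
)

# "hass" is assumed to be an initialized Home Assistant instance with the
# recorder component running (placeholder in this sketch).
start_time = dt_util.utcnow() - timedelta(hours=1)

# One metadata query per call; returns {} if nothing matches.
stats = statistics_during_period(hass, start_time, statistic_ids=["sensor.test1"])

# statistic_id is now a required positional argument.
last = get_last_statistics(hass, 1, "sensor.test1")
```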