Mirror of https://github.com/home-assistant/core.git, synced 2025-04-24 17:27:52 +00:00
Remove state_unit_of_measurement from metadata DB table (#79370)

* Remove state_unit_of_measurement from metadata DB table
* Adjust test
parent 4cfcf562b5
commit 2de273500e
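For orientation, a minimal sketch (not part of the commit; field names taken from the StatisticMetaData hunk below, example values from the test fixtures further down) of what statistics metadata looks like once the state unit is dropped — the display unit is derived from the live state at query time instead of being stored:

    from __future__ import annotations

    from typing import TypedDict


    class StatisticMetaData(TypedDict):
        """Statistic metadata (field set as it appears in the diff below)."""

        has_mean: bool
        has_sum: bool
        name: str | None
        source: str
        statistic_id: str
        unit_of_measurement: str | None


    # Example metadata for an externally imported statistic; there is no
    # "state_unit_of_measurement" key any more.
    metadata: StatisticMetaData = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }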
@@ -295,7 +295,6 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
    metadata: StatisticMetaData = {
        "source": DOMAIN,
        "name": "Outdoor temperature",
        "state_unit_of_measurement": TEMP_CELSIUS,
        "statistic_id": f"{DOMAIN}:temperature_outdoor",
        "unit_of_measurement": TEMP_CELSIUS,
        "has_mean": True,
@@ -309,7 +308,6 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
    metadata = {
        "source": DOMAIN,
        "name": "Energy consumption 1",
        "state_unit_of_measurement": ENERGY_KILO_WATT_HOUR,
        "statistic_id": f"{DOMAIN}:energy_consumption_kwh",
        "unit_of_measurement": ENERGY_KILO_WATT_HOUR,
        "has_mean": False,
@@ -322,7 +320,6 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
    metadata = {
        "source": DOMAIN,
        "name": "Energy consumption 2",
        "state_unit_of_measurement": ENERGY_MEGA_WATT_HOUR,
        "statistic_id": f"{DOMAIN}:energy_consumption_mwh",
        "unit_of_measurement": ENERGY_MEGA_WATT_HOUR,
        "has_mean": False,
@@ -337,7 +334,6 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
    metadata = {
        "source": DOMAIN,
        "name": "Gas consumption 1",
        "state_unit_of_measurement": VOLUME_CUBIC_METERS,
        "statistic_id": f"{DOMAIN}:gas_consumption_m3",
        "unit_of_measurement": VOLUME_CUBIC_METERS,
        "has_mean": False,
@@ -352,7 +348,6 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
    metadata = {
        "source": DOMAIN,
        "name": "Gas consumption 2",
        "state_unit_of_measurement": VOLUME_CUBIC_FEET,
        "statistic_id": f"{DOMAIN}:gas_consumption_ft3",
        "unit_of_measurement": VOLUME_CUBIC_FEET,
        "has_mean": False,
@@ -494,7 +494,6 @@ class StatisticsMeta(Base): # type: ignore[misc,valid-type]
    id = Column(Integer, Identity(), primary_key=True)
    statistic_id = Column(String(255), index=True, unique=True)
    source = Column(String(32))
    state_unit_of_measurement = Column(String(255))
    unit_of_measurement = Column(String(255))
    has_mean = Column(Boolean)
    has_sum = Column(Boolean)
@@ -748,24 +748,9 @@ def _apply_update( # noqa: C901
            session_maker, "statistics_meta", "ix_statistics_meta_statistic_id"
        )
    elif new_version == 30:
        _add_columns(
            session_maker,
            "statistics_meta",
            ["state_unit_of_measurement VARCHAR(255)"],
        )
        # When querying the database, be careful to only explicitly query for columns
        # which were present in schema version 30. If querying the table, SQLAlchemy
        # will refer to future columns.
        with session_scope(session=session_maker()) as session:
            for statistics_meta in session.query(
                StatisticsMeta.id, StatisticsMeta.unit_of_measurement
            ):
                session.query(StatisticsMeta).filter_by(id=statistics_meta.id).update(
                    {
                        StatisticsMeta.state_unit_of_measurement: statistics_meta.unit_of_measurement,
                    },
                    synchronize_session=False,
                )
        # This added a column to the statistics_meta table, removed again before
        # release of HA Core 2022.10.0
        pass
    else:
        raise ValueError(f"No schema migration defined for version {new_version}")
@@ -64,7 +64,6 @@ class StatisticMetaData(TypedDict):
    has_sum: bool
    name: str | None
    source: str
    state_unit_of_measurement: str | None
    statistic_id: str
    unit_of_measurement: str | None
@@ -24,6 +24,7 @@ from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import Subquery
import voluptuous as vol

from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT
from homeassistant.core import Event, HomeAssistant, callback, valid_entity_id
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry
@@ -116,7 +117,6 @@ QUERY_STATISTIC_META = [
    StatisticsMeta.id,
    StatisticsMeta.statistic_id,
    StatisticsMeta.source,
    StatisticsMeta.state_unit_of_measurement,
    StatisticsMeta.unit_of_measurement,
    StatisticsMeta.has_mean,
    StatisticsMeta.has_sum,
@@ -342,8 +342,6 @@ def _update_or_add_metadata(
        old_metadata["has_mean"] != new_metadata["has_mean"]
        or old_metadata["has_sum"] != new_metadata["has_sum"]
        or old_metadata["name"] != new_metadata["name"]
        or old_metadata["state_unit_of_measurement"]
        != new_metadata["state_unit_of_measurement"]
        or old_metadata["unit_of_measurement"] != new_metadata["unit_of_measurement"]
    ):
        session.query(StatisticsMeta).filter_by(statistic_id=statistic_id).update(
@@ -351,9 +349,6 @@ def _update_or_add_metadata(
                StatisticsMeta.has_mean: new_metadata["has_mean"],
                StatisticsMeta.has_sum: new_metadata["has_sum"],
                StatisticsMeta.name: new_metadata["name"],
                StatisticsMeta.state_unit_of_measurement: new_metadata[
                    "state_unit_of_measurement"
                ],
                StatisticsMeta.unit_of_measurement: new_metadata["unit_of_measurement"],
            },
            synchronize_session=False,
@@ -820,7 +815,6 @@ def get_metadata_with_session(
                "has_sum": meta["has_sum"],
                "name": meta["name"],
                "source": meta["source"],
                "state_unit_of_measurement": meta["state_unit_of_measurement"],
                "statistic_id": meta["statistic_id"],
                "unit_of_measurement": meta["unit_of_measurement"],
            },
@@ -899,7 +893,6 @@ def list_statistic_ids(

    result = {
        meta["statistic_id"]: {
            "state_unit_of_measurement": meta["state_unit_of_measurement"],
            "has_mean": meta["has_mean"],
            "has_sum": meta["has_sum"],
            "name": meta["name"],
@@ -926,7 +919,6 @@ def list_statistic_ids(
            "has_sum": meta["has_sum"],
            "name": meta["name"],
            "source": meta["source"],
            "state_unit_of_measurement": meta["state_unit_of_measurement"],
            "unit_class": _get_unit_class(meta["unit_of_measurement"]),
            "unit_of_measurement": meta["unit_of_measurement"],
        }
@@ -939,7 +931,6 @@ def list_statistic_ids(
            "has_sum": info["has_sum"],
            "name": info.get("name"),
            "source": info["source"],
            "state_unit_of_measurement": info["state_unit_of_measurement"],
            "statistics_unit_of_measurement": info["unit_of_measurement"],
            "unit_class": info["unit_class"],
        }
@@ -1386,9 +1377,10 @@ def _sorted_statistics_to_dict(

    # Append all statistic entries, and optionally do unit conversion
    for meta_id, group in groupby(stats, lambda stat: stat.metadata_id): # type: ignore[no-any-return]
        unit = metadata[meta_id]["unit_of_measurement"]
        state_unit = metadata[meta_id]["state_unit_of_measurement"]
        state_unit = unit = metadata[meta_id]["unit_of_measurement"]
        statistic_id = metadata[meta_id]["statistic_id"]
        if state := hass.states.get(statistic_id):
            state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        if unit is not None and convert_units:
            convert = _get_statistic_to_display_unit_converter(unit, state_unit, units)
        else:
@@ -1470,18 +1462,6 @@ def _async_import_statistics(
    get_instance(hass).async_import_statistics(metadata, statistics)


def _validate_units(statistics_unit: str | None, state_unit: str | None) -> None:
    """Raise if the statistics unit and state unit are not compatible."""
    if statistics_unit == state_unit:
        return
    if (
        unit_converter := STATISTIC_UNIT_TO_UNIT_CONVERTER.get(statistics_unit)
    ) is None:
        raise HomeAssistantError(f"Invalid units {statistics_unit},{state_unit}")
    if state_unit not in unit_converter.VALID_UNITS:
        raise HomeAssistantError(f"Invalid units {statistics_unit},{state_unit}")


@callback
def async_import_statistics(
    hass: HomeAssistant,
@@ -1499,10 +1479,6 @@ def async_import_statistics(
    if not metadata["source"] or metadata["source"] != DOMAIN:
        raise HomeAssistantError("Invalid source")

    _validate_units(
        metadata["unit_of_measurement"], metadata["state_unit_of_measurement"]
    )

    _async_import_statistics(hass, metadata, statistics)
@@ -1525,10 +1501,6 @@ def async_add_external_statistics(
    if not metadata["source"] or metadata["source"] != domain:
        raise HomeAssistantError("Invalid source")

    _validate_units(
        metadata["unit_of_measurement"], metadata["state_unit_of_measurement"]
    )

    _async_import_statistics(hass, metadata, statistics)
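For callers, a hedged sketch of importing external statistics after this change (the hass instance and the external_statistics rows are assumed to exist already; metadata values mirror the test fixtures further down). Importers now pass only the statistics unit, and the explicit _validate_units step shown above is gone:

    from homeassistant.components.recorder.statistics import (
        async_add_external_statistics,
    )

    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }
    # external_statistics is an iterable of StatisticData rows built elsewhere;
    # this callback is expected to run inside the event loop.
    async_add_external_statistics(hass, external_metadata, external_statistics)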
@@ -376,7 +376,6 @@ def ws_import_statistics(
    """Import statistics."""
    metadata = msg["metadata"]
    stats = msg["stats"]
    metadata["state_unit_of_measurement"] = metadata["unit_of_measurement"]

    if valid_entity_id(metadata["statistic_id"]):
        async_import_statistics(hass, metadata, stats)
@@ -480,7 +480,6 @@ def _compile_statistics( # noqa: C901
            "has_sum": "sum" in wanted_statistics[entity_id],
            "name": None,
            "source": RECORDER_DOMAIN,
            "state_unit_of_measurement": state_unit,
            "statistic_id": entity_id,
            "unit_of_measurement": normalized_unit,
        }
@@ -621,7 +620,6 @@ def list_statistic_ids(
            "has_sum": "sum" in provided_statistics,
            "name": None,
            "source": RECORDER_DOMAIN,
            "state_unit_of_measurement": state_unit,
            "statistic_id": state.entity_id,
            "unit_of_measurement": state_unit,
        }
@@ -637,7 +635,6 @@ def list_statistic_ids(
            "has_sum": "sum" in provided_statistics,
            "name": None,
            "source": RECORDER_DOMAIN,
            "state_unit_of_measurement": state_unit,
            "statistic_id": state.entity_id,
            "unit_of_measurement": statistics_unit,
        }
@@ -642,7 +642,6 @@ class TibberDataCoordinator(DataUpdateCoordinator):
                has_sum=True,
                name=f"{home.name} {sensor_type}",
                source=TIBBER_DOMAIN,
                state_unit_of_measurement=unit,
                statistic_id=statistic_id,
                unit_of_measurement=unit,
            )
@ -67,7 +67,6 @@ async def test_demo_statistics(hass, recorder_mock):
|
||||
"has_sum": False,
|
||||
"name": "Outdoor temperature",
|
||||
"source": "demo",
|
||||
"state_unit_of_measurement": "°C",
|
||||
"statistic_id": "demo:temperature_outdoor",
|
||||
"statistics_unit_of_measurement": "°C",
|
||||
"unit_class": "temperature",
|
||||
@ -77,7 +76,6 @@ async def test_demo_statistics(hass, recorder_mock):
|
||||
"has_sum": True,
|
||||
"name": "Energy consumption 1",
|
||||
"source": "demo",
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": "demo:energy_consumption_kwh",
|
||||
"statistics_unit_of_measurement": "kWh",
|
||||
"unit_class": "energy",
|
||||
@ -96,7 +94,6 @@ async def test_demo_statistics_growth(hass, recorder_mock):
|
||||
metadata = {
|
||||
"source": DOMAIN,
|
||||
"name": "Energy consumption 1",
|
||||
"state_unit_of_measurement": "m³",
|
||||
"statistic_id": statistic_id,
|
||||
"unit_of_measurement": "m³",
|
||||
"has_mean": False,
|
||||
|
@ -336,7 +336,6 @@ async def test_fossil_energy_consumption_no_co2(hass, hass_ws_client, recorder_m
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": "test",
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": "test:total_energy_import_tariff_1",
|
||||
"unit_of_measurement": "kWh",
|
||||
}
|
||||
@ -371,7 +370,6 @@ async def test_fossil_energy_consumption_no_co2(hass, hass_ws_client, recorder_m
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": "test",
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": "test:total_energy_import_tariff_2",
|
||||
"unit_of_measurement": "kWh",
|
||||
}
|
||||
@ -499,7 +497,6 @@ async def test_fossil_energy_consumption_hole(hass, hass_ws_client, recorder_moc
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": "test",
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": "test:total_energy_import_tariff_1",
|
||||
"unit_of_measurement": "kWh",
|
||||
}
|
||||
@ -534,7 +531,6 @@ async def test_fossil_energy_consumption_hole(hass, hass_ws_client, recorder_moc
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": "test",
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": "test:total_energy_import_tariff_2",
|
||||
"unit_of_measurement": "kWh",
|
||||
}
|
||||
@ -660,7 +656,6 @@ async def test_fossil_energy_consumption_no_data(hass, hass_ws_client, recorder_
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": "test",
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": "test:total_energy_import_tariff_1",
|
||||
"unit_of_measurement": "kWh",
|
||||
}
|
||||
@ -695,7 +690,6 @@ async def test_fossil_energy_consumption_no_data(hass, hass_ws_client, recorder_
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": "test",
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": "test:total_energy_import_tariff_2",
|
||||
"unit_of_measurement": "kWh",
|
||||
}
|
||||
@ -812,7 +806,6 @@ async def test_fossil_energy_consumption(hass, hass_ws_client, recorder_mock):
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": "test",
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": "test:total_energy_import_tariff_1",
|
||||
"unit_of_measurement": "kWh",
|
||||
}
|
||||
@ -847,7 +840,6 @@ async def test_fossil_energy_consumption(hass, hass_ws_client, recorder_mock):
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": "test",
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": "test:total_energy_import_tariff_2",
|
||||
"unit_of_measurement": "kWh",
|
||||
}
|
||||
@ -878,7 +870,6 @@ async def test_fossil_energy_consumption(hass, hass_ws_client, recorder_mock):
|
||||
"has_sum": False,
|
||||
"name": "Fossil percentage",
|
||||
"source": "test",
|
||||
"state_unit_of_measurement": "%",
|
||||
"statistic_id": "test:fossil_percentage",
|
||||
"unit_of_measurement": "%",
|
||||
}
|
||||
|
@ -1,616 +0,0 @@
|
||||
"""Models for SQLAlchemy.
|
||||
|
||||
This file contains the model definitions for schema version 28.
|
||||
It is used to test the schema migration logic.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from typing import Any, TypeVar, cast
|
||||
|
||||
import ciso8601
|
||||
from fnvhash import fnv1a_32
|
||||
from sqlalchemy import (
|
||||
JSON,
|
||||
BigInteger,
|
||||
Boolean,
|
||||
Column,
|
||||
DateTime,
|
||||
Float,
|
||||
ForeignKey,
|
||||
Identity,
|
||||
Index,
|
||||
Integer,
|
||||
SmallInteger,
|
||||
String,
|
||||
Text,
|
||||
distinct,
|
||||
type_coerce,
|
||||
)
|
||||
from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite
|
||||
from sqlalchemy.ext.declarative import declared_attr
|
||||
from sqlalchemy.orm import aliased, declarative_base, relationship
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
from homeassistant.components.recorder.const import ALL_DOMAIN_EXCLUDE_ATTRS
|
||||
from homeassistant.components.recorder.models import (
|
||||
StatisticData,
|
||||
StatisticMetaData,
|
||||
process_timestamp,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
MAX_LENGTH_EVENT_CONTEXT_ID,
|
||||
MAX_LENGTH_EVENT_EVENT_TYPE,
|
||||
MAX_LENGTH_EVENT_ORIGIN,
|
||||
MAX_LENGTH_STATE_ENTITY_ID,
|
||||
MAX_LENGTH_STATE_STATE,
|
||||
)
|
||||
from homeassistant.core import Context, Event, EventOrigin, State, split_entity_id
|
||||
from homeassistant.helpers.json import (
|
||||
JSON_DECODE_EXCEPTIONS,
|
||||
JSON_DUMP,
|
||||
json_bytes,
|
||||
json_loads,
|
||||
)
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
# SQLAlchemy Schema
|
||||
# pylint: disable=invalid-name
|
||||
Base = declarative_base()
|
||||
|
||||
SCHEMA_VERSION = 29
|
||||
|
||||
_StatisticsBaseSelfT = TypeVar("_StatisticsBaseSelfT", bound="StatisticsBase")
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
TABLE_EVENTS = "events"
|
||||
TABLE_EVENT_DATA = "event_data"
|
||||
TABLE_STATES = "states"
|
||||
TABLE_STATE_ATTRIBUTES = "state_attributes"
|
||||
TABLE_RECORDER_RUNS = "recorder_runs"
|
||||
TABLE_SCHEMA_CHANGES = "schema_changes"
|
||||
TABLE_STATISTICS = "statistics"
|
||||
TABLE_STATISTICS_META = "statistics_meta"
|
||||
TABLE_STATISTICS_RUNS = "statistics_runs"
|
||||
TABLE_STATISTICS_SHORT_TERM = "statistics_short_term"
|
||||
|
||||
ALL_TABLES = [
|
||||
TABLE_STATES,
|
||||
TABLE_STATE_ATTRIBUTES,
|
||||
TABLE_EVENTS,
|
||||
TABLE_EVENT_DATA,
|
||||
TABLE_RECORDER_RUNS,
|
||||
TABLE_SCHEMA_CHANGES,
|
||||
TABLE_STATISTICS,
|
||||
TABLE_STATISTICS_META,
|
||||
TABLE_STATISTICS_RUNS,
|
||||
TABLE_STATISTICS_SHORT_TERM,
|
||||
]
|
||||
|
||||
TABLES_TO_CHECK = [
|
||||
TABLE_STATES,
|
||||
TABLE_EVENTS,
|
||||
TABLE_RECORDER_RUNS,
|
||||
TABLE_SCHEMA_CHANGES,
|
||||
]
|
||||
|
||||
LAST_UPDATED_INDEX = "ix_states_last_updated"
|
||||
ENTITY_ID_LAST_UPDATED_INDEX = "ix_states_entity_id_last_updated"
|
||||
EVENTS_CONTEXT_ID_INDEX = "ix_events_context_id"
|
||||
STATES_CONTEXT_ID_INDEX = "ix_states_context_id"
|
||||
|
||||
|
||||
class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): # type: ignore[misc]
|
||||
"""Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex."""
|
||||
|
||||
def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def]
|
||||
"""Offload the datetime parsing to ciso8601."""
|
||||
return lambda value: None if value is None else ciso8601.parse_datetime(value)
|
||||
|
||||
|
||||
JSON_VARIENT_CAST = Text().with_variant(
|
||||
postgresql.JSON(none_as_null=True), "postgresql"
|
||||
)
|
||||
JSONB_VARIENT_CAST = Text().with_variant(
|
||||
postgresql.JSONB(none_as_null=True), "postgresql"
|
||||
)
|
||||
DATETIME_TYPE = (
|
||||
DateTime(timezone=True)
|
||||
.with_variant(mysql.DATETIME(timezone=True, fsp=6), "mysql")
|
||||
.with_variant(FAST_PYSQLITE_DATETIME(), "sqlite")
|
||||
)
|
||||
DOUBLE_TYPE = (
|
||||
Float()
|
||||
.with_variant(mysql.DOUBLE(asdecimal=False), "mysql")
|
||||
.with_variant(oracle.DOUBLE_PRECISION(), "oracle")
|
||||
.with_variant(postgresql.DOUBLE_PRECISION(), "postgresql")
|
||||
)
|
||||
|
||||
|
||||
class JSONLiteral(JSON): # type: ignore[misc]
|
||||
"""Teach SA how to literalize json."""
|
||||
|
||||
def literal_processor(self, dialect: str) -> Callable[[Any], str]:
|
||||
"""Processor to convert a value to JSON."""
|
||||
|
||||
def process(value: Any) -> str:
|
||||
"""Dump json."""
|
||||
return JSON_DUMP(value)
|
||||
|
||||
return process
|
||||
|
||||
|
||||
EVENT_ORIGIN_ORDER = [EventOrigin.local, EventOrigin.remote]
|
||||
EVENT_ORIGIN_TO_IDX = {origin: idx for idx, origin in enumerate(EVENT_ORIGIN_ORDER)}
|
||||
|
||||
|
||||
class Events(Base): # type: ignore[misc,valid-type]
|
||||
"""Event history data."""
|
||||
|
||||
__table_args__ = (
|
||||
# Used for fetching events at a specific time
|
||||
# see logbook
|
||||
Index("ix_events_event_type_time_fired", "event_type", "time_fired"),
|
||||
{"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
|
||||
)
|
||||
__tablename__ = TABLE_EVENTS
|
||||
event_id = Column(Integer, Identity(), primary_key=True)
|
||||
event_type = Column(String(MAX_LENGTH_EVENT_EVENT_TYPE))
|
||||
event_data = Column(Text().with_variant(mysql.LONGTEXT, "mysql"))
|
||||
origin = Column(String(MAX_LENGTH_EVENT_ORIGIN)) # no longer used for new rows
|
||||
origin_idx = Column(SmallInteger)
|
||||
time_fired = Column(DATETIME_TYPE, index=True)
|
||||
context_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID), index=True)
|
||||
context_user_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID))
|
||||
context_parent_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID))
|
||||
data_id = Column(Integer, ForeignKey("event_data.data_id"), index=True)
|
||||
event_data_rel = relationship("EventData")
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Return string representation of instance for debugging."""
|
||||
return (
|
||||
f"<recorder.Events("
|
||||
f"id={self.event_id}, type='{self.event_type}', "
|
||||
f"origin_idx='{self.origin_idx}', time_fired='{self.time_fired}'"
|
||||
f", data_id={self.data_id})>"
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def from_event(event: Event) -> Events:
|
||||
"""Create an event database object from a native event."""
|
||||
return Events(
|
||||
event_type=event.event_type,
|
||||
event_data=None,
|
||||
origin_idx=EVENT_ORIGIN_TO_IDX.get(event.origin),
|
||||
time_fired=event.time_fired,
|
||||
context_id=event.context.id,
|
||||
context_user_id=event.context.user_id,
|
||||
context_parent_id=event.context.parent_id,
|
||||
)
|
||||
|
||||
def to_native(self, validate_entity_id: bool = True) -> Event | None:
|
||||
"""Convert to a native HA Event."""
|
||||
context = Context(
|
||||
id=self.context_id,
|
||||
user_id=self.context_user_id,
|
||||
parent_id=self.context_parent_id,
|
||||
)
|
||||
try:
|
||||
return Event(
|
||||
self.event_type,
|
||||
json_loads(self.event_data) if self.event_data else {},
|
||||
EventOrigin(self.origin)
|
||||
if self.origin
|
||||
else EVENT_ORIGIN_ORDER[self.origin_idx],
|
||||
process_timestamp(self.time_fired),
|
||||
context=context,
|
||||
)
|
||||
except JSON_DECODE_EXCEPTIONS:
|
||||
# When json_loads fails
|
||||
_LOGGER.exception("Error converting to event: %s", self)
|
||||
return None
|
||||
|
||||
|
||||
class EventData(Base): # type: ignore[misc,valid-type]
|
||||
"""Event data history."""
|
||||
|
||||
__table_args__ = (
|
||||
{"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
|
||||
)
|
||||
__tablename__ = TABLE_EVENT_DATA
|
||||
data_id = Column(Integer, Identity(), primary_key=True)
|
||||
hash = Column(BigInteger, index=True)
|
||||
# Note that this is not named attributes to avoid confusion with the states table
|
||||
shared_data = Column(Text().with_variant(mysql.LONGTEXT, "mysql"))
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Return string representation of instance for debugging."""
|
||||
return (
|
||||
f"<recorder.EventData("
|
||||
f"id={self.data_id}, hash='{self.hash}', data='{self.shared_data}'"
|
||||
f")>"
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def from_event(event: Event) -> EventData:
|
||||
"""Create object from an event."""
|
||||
shared_data = json_bytes(event.data)
|
||||
return EventData(
|
||||
shared_data=shared_data.decode("utf-8"),
|
||||
hash=EventData.hash_shared_data_bytes(shared_data),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def shared_data_bytes_from_event(event: Event) -> bytes:
|
||||
"""Create shared_data from an event."""
|
||||
return json_bytes(event.data)
|
||||
|
||||
@staticmethod
|
||||
def hash_shared_data_bytes(shared_data_bytes: bytes) -> int:
|
||||
"""Return the hash of json encoded shared data."""
|
||||
return cast(int, fnv1a_32(shared_data_bytes))
|
||||
|
||||
def to_native(self) -> dict[str, Any]:
|
||||
"""Convert to an HA state object."""
|
||||
try:
|
||||
return cast(dict[str, Any], json_loads(self.shared_data))
|
||||
except JSON_DECODE_EXCEPTIONS:
|
||||
_LOGGER.exception("Error converting row to event data: %s", self)
|
||||
return {}
|
||||
|
||||
|
||||
class States(Base): # type: ignore[misc,valid-type]
|
||||
"""State change history."""
|
||||
|
||||
__table_args__ = (
|
||||
# Used for fetching the state of entities at a specific time
|
||||
# (get_states in history.py)
|
||||
Index(ENTITY_ID_LAST_UPDATED_INDEX, "entity_id", "last_updated"),
|
||||
{"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
|
||||
)
|
||||
__tablename__ = TABLE_STATES
|
||||
state_id = Column(Integer, Identity(), primary_key=True)
|
||||
entity_id = Column(String(MAX_LENGTH_STATE_ENTITY_ID))
|
||||
state = Column(String(MAX_LENGTH_STATE_STATE))
|
||||
attributes = Column(
|
||||
Text().with_variant(mysql.LONGTEXT, "mysql")
|
||||
) # no longer used for new rows
|
||||
event_id = Column( # no longer used for new rows
|
||||
Integer, ForeignKey("events.event_id", ondelete="CASCADE"), index=True
|
||||
)
|
||||
last_changed = Column(DATETIME_TYPE)
|
||||
last_updated = Column(DATETIME_TYPE, default=dt_util.utcnow, index=True)
|
||||
old_state_id = Column(Integer, ForeignKey("states.state_id"), index=True)
|
||||
attributes_id = Column(
|
||||
Integer, ForeignKey("state_attributes.attributes_id"), index=True
|
||||
)
|
||||
context_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID), index=True)
|
||||
context_user_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID))
|
||||
context_parent_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID))
|
||||
origin_idx = Column(SmallInteger) # 0 is local, 1 is remote
|
||||
old_state = relationship("States", remote_side=[state_id])
|
||||
state_attributes = relationship("StateAttributes")
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Return string representation of instance for debugging."""
|
||||
return (
|
||||
f"<recorder.States("
|
||||
f"id={self.state_id}, entity_id='{self.entity_id}', "
|
||||
f"state='{self.state}', event_id='{self.event_id}', "
|
||||
f"last_updated='{self.last_updated.isoformat(sep=' ', timespec='seconds')}', "
|
||||
f"old_state_id={self.old_state_id}, attributes_id={self.attributes_id}"
|
||||
f")>"
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def from_event(event: Event) -> States:
|
||||
"""Create object from a state_changed event."""
|
||||
entity_id = event.data["entity_id"]
|
||||
state: State | None = event.data.get("new_state")
|
||||
dbstate = States(
|
||||
entity_id=entity_id,
|
||||
attributes=None,
|
||||
context_id=event.context.id,
|
||||
context_user_id=event.context.user_id,
|
||||
context_parent_id=event.context.parent_id,
|
||||
origin_idx=EVENT_ORIGIN_TO_IDX.get(event.origin),
|
||||
)
|
||||
|
||||
# None state means the state was removed from the state machine
|
||||
if state is None:
|
||||
dbstate.state = ""
|
||||
dbstate.last_updated = event.time_fired
|
||||
dbstate.last_changed = None
|
||||
return dbstate
|
||||
|
||||
dbstate.state = state.state
|
||||
dbstate.last_updated = state.last_updated
|
||||
if state.last_updated == state.last_changed:
|
||||
dbstate.last_changed = None
|
||||
else:
|
||||
dbstate.last_changed = state.last_changed
|
||||
|
||||
return dbstate
|
||||
|
||||
def to_native(self, validate_entity_id: bool = True) -> State | None:
|
||||
"""Convert to an HA state object."""
|
||||
context = Context(
|
||||
id=self.context_id,
|
||||
user_id=self.context_user_id,
|
||||
parent_id=self.context_parent_id,
|
||||
)
|
||||
try:
|
||||
attrs = json_loads(self.attributes) if self.attributes else {}
|
||||
except JSON_DECODE_EXCEPTIONS:
|
||||
# When json_loads fails
|
||||
_LOGGER.exception("Error converting row to state: %s", self)
|
||||
return None
|
||||
if self.last_changed is None or self.last_changed == self.last_updated:
|
||||
last_changed = last_updated = process_timestamp(self.last_updated)
|
||||
else:
|
||||
last_updated = process_timestamp(self.last_updated)
|
||||
last_changed = process_timestamp(self.last_changed)
|
||||
return State(
|
||||
self.entity_id,
|
||||
self.state,
|
||||
# Join the state_attributes table on attributes_id to get the attributes
|
||||
# for newer states
|
||||
attrs,
|
||||
last_changed,
|
||||
last_updated,
|
||||
context=context,
|
||||
validate_entity_id=validate_entity_id,
|
||||
)
|
||||
|
||||
|
||||
class StateAttributes(Base): # type: ignore[misc,valid-type]
|
||||
"""State attribute change history."""
|
||||
|
||||
__table_args__ = (
|
||||
{"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
|
||||
)
|
||||
__tablename__ = TABLE_STATE_ATTRIBUTES
|
||||
attributes_id = Column(Integer, Identity(), primary_key=True)
|
||||
hash = Column(BigInteger, index=True)
|
||||
# Note that this is not named attributes to avoid confusion with the states table
|
||||
shared_attrs = Column(Text().with_variant(mysql.LONGTEXT, "mysql"))
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Return string representation of instance for debugging."""
|
||||
return (
|
||||
f"<recorder.StateAttributes("
|
||||
f"id={self.attributes_id}, hash='{self.hash}', attributes='{self.shared_attrs}'"
|
||||
f")>"
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def from_event(event: Event) -> StateAttributes:
|
||||
"""Create object from a state_changed event."""
|
||||
state: State | None = event.data.get("new_state")
|
||||
# None state means the state was removed from the state machine
|
||||
attr_bytes = b"{}" if state is None else json_bytes(state.attributes)
|
||||
dbstate = StateAttributes(shared_attrs=attr_bytes.decode("utf-8"))
|
||||
dbstate.hash = StateAttributes.hash_shared_attrs_bytes(attr_bytes)
|
||||
return dbstate
|
||||
|
||||
@staticmethod
|
||||
def shared_attrs_bytes_from_event(
|
||||
event: Event, exclude_attrs_by_domain: dict[str, set[str]]
|
||||
) -> bytes:
|
||||
"""Create shared_attrs from a state_changed event."""
|
||||
state: State | None = event.data.get("new_state")
|
||||
# None state means the state was removed from the state machine
|
||||
if state is None:
|
||||
return b"{}"
|
||||
domain = split_entity_id(state.entity_id)[0]
|
||||
exclude_attrs = (
|
||||
exclude_attrs_by_domain.get(domain, set()) | ALL_DOMAIN_EXCLUDE_ATTRS
|
||||
)
|
||||
return json_bytes(
|
||||
{k: v for k, v in state.attributes.items() if k not in exclude_attrs}
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def hash_shared_attrs_bytes(shared_attrs_bytes: bytes) -> int:
|
||||
"""Return the hash of json encoded shared attributes."""
|
||||
return cast(int, fnv1a_32(shared_attrs_bytes))
|
||||
|
||||
def to_native(self) -> dict[str, Any]:
|
||||
"""Convert to an HA state object."""
|
||||
try:
|
||||
return cast(dict[str, Any], json_loads(self.shared_attrs))
|
||||
except JSON_DECODE_EXCEPTIONS:
|
||||
# When json_loads fails
|
||||
_LOGGER.exception("Error converting row to state attributes: %s", self)
|
||||
return {}
|
||||
|
||||
|
||||
class StatisticsBase:
|
||||
"""Statistics base class."""
|
||||
|
||||
id = Column(Integer, Identity(), primary_key=True)
|
||||
created = Column(DATETIME_TYPE, default=dt_util.utcnow)
|
||||
|
||||
@declared_attr # type: ignore[misc]
|
||||
def metadata_id(self) -> Column:
|
||||
"""Define the metadata_id column for sub classes."""
|
||||
return Column(
|
||||
Integer,
|
||||
ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"),
|
||||
index=True,
|
||||
)
|
||||
|
||||
start = Column(DATETIME_TYPE, index=True)
|
||||
mean = Column(DOUBLE_TYPE)
|
||||
min = Column(DOUBLE_TYPE)
|
||||
max = Column(DOUBLE_TYPE)
|
||||
last_reset = Column(DATETIME_TYPE)
|
||||
state = Column(DOUBLE_TYPE)
|
||||
sum = Column(DOUBLE_TYPE)
|
||||
|
||||
@classmethod
|
||||
def from_stats(
|
||||
cls: type[_StatisticsBaseSelfT], metadata_id: int, stats: StatisticData
|
||||
) -> _StatisticsBaseSelfT:
|
||||
"""Create object from a statistics."""
|
||||
return cls( # type: ignore[call-arg,misc]
|
||||
metadata_id=metadata_id,
|
||||
**stats,
|
||||
)
|
||||
|
||||
|
||||
class Statistics(Base, StatisticsBase): # type: ignore[misc,valid-type]
|
||||
"""Long term statistics."""
|
||||
|
||||
duration = timedelta(hours=1)
|
||||
|
||||
__table_args__ = (
|
||||
# Used for fetching statistics for a certain entity at a specific time
|
||||
Index("ix_statistics_statistic_id_start", "metadata_id", "start", unique=True),
|
||||
)
|
||||
__tablename__ = TABLE_STATISTICS
|
||||
|
||||
|
||||
class StatisticsShortTerm(Base, StatisticsBase): # type: ignore[misc,valid-type]
|
||||
"""Short term statistics."""
|
||||
|
||||
duration = timedelta(minutes=5)
|
||||
|
||||
__table_args__ = (
|
||||
# Used for fetching statistics for a certain entity at a specific time
|
||||
Index(
|
||||
"ix_statistics_short_term_statistic_id_start",
|
||||
"metadata_id",
|
||||
"start",
|
||||
unique=True,
|
||||
),
|
||||
)
|
||||
__tablename__ = TABLE_STATISTICS_SHORT_TERM
|
||||
|
||||
|
||||
class StatisticsMeta(Base): # type: ignore[misc,valid-type]
|
||||
"""Statistics meta data."""
|
||||
|
||||
__table_args__ = (
|
||||
{"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
|
||||
)
|
||||
__tablename__ = TABLE_STATISTICS_META
|
||||
id = Column(Integer, Identity(), primary_key=True)
|
||||
statistic_id = Column(String(255), index=True, unique=True)
|
||||
source = Column(String(32))
|
||||
unit_of_measurement = Column(String(255))
|
||||
has_mean = Column(Boolean)
|
||||
has_sum = Column(Boolean)
|
||||
name = Column(String(255))
|
||||
|
||||
@staticmethod
|
||||
def from_meta(meta: StatisticMetaData) -> StatisticsMeta:
|
||||
"""Create object from meta data."""
|
||||
return StatisticsMeta(**meta)
|
||||
|
||||
|
||||
class RecorderRuns(Base): # type: ignore[misc,valid-type]
|
||||
"""Representation of recorder run."""
|
||||
|
||||
__table_args__ = (Index("ix_recorder_runs_start_end", "start", "end"),)
|
||||
__tablename__ = TABLE_RECORDER_RUNS
|
||||
run_id = Column(Integer, Identity(), primary_key=True)
|
||||
start = Column(DateTime(timezone=True), default=dt_util.utcnow)
|
||||
end = Column(DateTime(timezone=True))
|
||||
closed_incorrect = Column(Boolean, default=False)
|
||||
created = Column(DateTime(timezone=True), default=dt_util.utcnow)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Return string representation of instance for debugging."""
|
||||
end = (
|
||||
f"'{self.end.isoformat(sep=' ', timespec='seconds')}'" if self.end else None
|
||||
)
|
||||
return (
|
||||
f"<recorder.RecorderRuns("
|
||||
f"id={self.run_id}, start='{self.start.isoformat(sep=' ', timespec='seconds')}', "
|
||||
f"end={end}, closed_incorrect={self.closed_incorrect}, "
|
||||
f"created='{self.created.isoformat(sep=' ', timespec='seconds')}'"
|
||||
f")>"
|
||||
)
|
||||
|
||||
def entity_ids(self, point_in_time: datetime | None = None) -> list[str]:
|
||||
"""Return the entity ids that existed in this run.
|
||||
|
||||
Specify point_in_time if you want to know which existed at that point
|
||||
in time inside the run.
|
||||
"""
|
||||
session = Session.object_session(self)
|
||||
|
||||
assert session is not None, "RecorderRuns need to be persisted"
|
||||
|
||||
query = session.query(distinct(States.entity_id)).filter(
|
||||
States.last_updated >= self.start
|
||||
)
|
||||
|
||||
if point_in_time is not None:
|
||||
query = query.filter(States.last_updated < point_in_time)
|
||||
elif self.end is not None:
|
||||
query = query.filter(States.last_updated < self.end)
|
||||
|
||||
return [row[0] for row in query]
|
||||
|
||||
def to_native(self, validate_entity_id: bool = True) -> RecorderRuns:
|
||||
"""Return self, native format is this model."""
|
||||
return self
|
||||
|
||||
|
||||
class SchemaChanges(Base): # type: ignore[misc,valid-type]
|
||||
"""Representation of schema version changes."""
|
||||
|
||||
__tablename__ = TABLE_SCHEMA_CHANGES
|
||||
change_id = Column(Integer, Identity(), primary_key=True)
|
||||
schema_version = Column(Integer)
|
||||
changed = Column(DateTime(timezone=True), default=dt_util.utcnow)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Return string representation of instance for debugging."""
|
||||
return (
|
||||
f"<recorder.SchemaChanges("
|
||||
f"id={self.change_id}, schema_version={self.schema_version}, "
|
||||
f"changed='{self.changed.isoformat(sep=' ', timespec='seconds')}'"
|
||||
f")>"
|
||||
)
|
||||
|
||||
|
||||
class StatisticsRuns(Base): # type: ignore[misc,valid-type]
|
||||
"""Representation of statistics run."""
|
||||
|
||||
__tablename__ = TABLE_STATISTICS_RUNS
|
||||
run_id = Column(Integer, Identity(), primary_key=True)
|
||||
start = Column(DateTime(timezone=True), index=True)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Return string representation of instance for debugging."""
|
||||
return (
|
||||
f"<recorder.StatisticsRuns("
|
||||
f"id={self.run_id}, start='{self.start.isoformat(sep=' ', timespec='seconds')}', "
|
||||
f")>"
|
||||
)
|
||||
|
||||
|
||||
EVENT_DATA_JSON = type_coerce(
|
||||
EventData.shared_data.cast(JSONB_VARIENT_CAST), JSONLiteral(none_as_null=True)
|
||||
)
|
||||
OLD_FORMAT_EVENT_DATA_JSON = type_coerce(
|
||||
Events.event_data.cast(JSONB_VARIENT_CAST), JSONLiteral(none_as_null=True)
|
||||
)
|
||||
|
||||
SHARED_ATTRS_JSON = type_coerce(
|
||||
StateAttributes.shared_attrs.cast(JSON_VARIENT_CAST), JSON(none_as_null=True)
|
||||
)
|
||||
OLD_FORMAT_ATTRS_JSON = type_coerce(
|
||||
States.attributes.cast(JSON_VARIENT_CAST), JSON(none_as_null=True)
|
||||
)
|
||||
|
||||
ENTITY_ID_IN_EVENT: Column = EVENT_DATA_JSON["entity_id"]
|
||||
OLD_ENTITY_ID_IN_EVENT: Column = OLD_FORMAT_EVENT_DATA_JSON["entity_id"]
|
||||
DEVICE_ID_IN_EVENT: Column = EVENT_DATA_JSON["device_id"]
|
||||
OLD_STATE = aliased(States, name="old_state")
|
@ -21,21 +21,18 @@ from sqlalchemy.pool import StaticPool
|
||||
from homeassistant.bootstrap import async_setup_component
|
||||
from homeassistant.components import persistent_notification as pn, recorder
|
||||
from homeassistant.components.recorder import db_schema, migration
|
||||
from homeassistant.components.recorder.const import SQLITE_URL_PREFIX
|
||||
from homeassistant.components.recorder.db_schema import (
|
||||
SCHEMA_VERSION,
|
||||
RecorderRuns,
|
||||
States,
|
||||
)
|
||||
from homeassistant.components.recorder.statistics import get_start_time
|
||||
from homeassistant.components.recorder.util import session_scope
|
||||
from homeassistant.helpers import recorder as recorder_helper
|
||||
from homeassistant.setup import setup_component
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from .common import async_wait_recording_done, create_engine_test, wait_recording_done
|
||||
from .common import async_wait_recording_done, create_engine_test
|
||||
|
||||
from tests.common import async_fire_time_changed, get_test_home_assistant
|
||||
from tests.common import async_fire_time_changed
|
||||
|
||||
ORIG_TZ = dt_util.DEFAULT_TIME_ZONE
|
||||
|
||||
@ -363,114 +360,6 @@ async def test_schema_migrate(hass, start_version, live):
|
||||
assert recorder.util.async_migration_in_progress(hass) is not True
|
||||
|
||||
|
||||
def test_set_state_unit(caplog, tmpdir):
|
||||
"""Test state unit column is initialized."""
|
||||
|
||||
def _create_engine_29(*args, **kwargs):
|
||||
"""Test version of create_engine that initializes with old schema.
|
||||
|
||||
This simulates an existing db with the old schema.
|
||||
"""
|
||||
module = "tests.components.recorder.db_schema_29"
|
||||
importlib.import_module(module)
|
||||
old_db_schema = sys.modules[module]
|
||||
engine = create_engine(*args, **kwargs)
|
||||
old_db_schema.Base.metadata.create_all(engine)
|
||||
with Session(engine) as session:
|
||||
session.add(recorder.db_schema.StatisticsRuns(start=get_start_time()))
|
||||
session.add(
|
||||
recorder.db_schema.SchemaChanges(
|
||||
schema_version=old_db_schema.SCHEMA_VERSION
|
||||
)
|
||||
)
|
||||
session.commit()
|
||||
return engine
|
||||
|
||||
test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
|
||||
dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
|
||||
|
||||
module = "tests.components.recorder.db_schema_29"
|
||||
importlib.import_module(module)
|
||||
old_db_schema = sys.modules[module]
|
||||
|
||||
external_energy_metadata_1 = {
|
||||
"has_mean": False,
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": "test",
|
||||
"statistic_id": "test:total_energy_import_tariff_1",
|
||||
"unit_of_measurement": "kWh",
|
||||
}
|
||||
external_co2_metadata = {
|
||||
"has_mean": True,
|
||||
"has_sum": False,
|
||||
"name": "Fossil percentage",
|
||||
"source": "test",
|
||||
"statistic_id": "test:fossil_percentage",
|
||||
"unit_of_measurement": "%",
|
||||
}
|
||||
|
||||
# Create some statistics_meta with schema version 29
|
||||
with patch.object(recorder, "db_schema", old_db_schema), patch.object(
|
||||
recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION
|
||||
), patch(
|
||||
"homeassistant.components.recorder.core.create_engine", new=_create_engine_29
|
||||
):
|
||||
hass = get_test_home_assistant()
|
||||
recorder_helper.async_initialize_recorder(hass)
|
||||
setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
|
||||
wait_recording_done(hass)
|
||||
wait_recording_done(hass)
|
||||
|
||||
with session_scope(hass=hass) as session:
|
||||
session.add(
|
||||
recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1)
|
||||
)
|
||||
session.add(
|
||||
recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata)
|
||||
)
|
||||
|
||||
with session_scope(hass=hass) as session:
|
||||
tmp = session.query(recorder.db_schema.StatisticsMeta).all()
|
||||
assert len(tmp) == 2
|
||||
assert tmp[0].id == 1
|
||||
assert tmp[0].statistic_id == "test:total_energy_import_tariff_1"
|
||||
assert tmp[0].unit_of_measurement == "kWh"
|
||||
assert not hasattr(tmp[0], "state_unit_of_measurement")
|
||||
assert tmp[1].id == 2
|
||||
assert tmp[1].statistic_id == "test:fossil_percentage"
|
||||
assert tmp[1].unit_of_measurement == "%"
|
||||
assert not hasattr(tmp[1], "state_unit_of_measurement")
|
||||
|
||||
hass.stop()
|
||||
dt_util.DEFAULT_TIME_ZONE = ORIG_TZ
|
||||
|
||||
# Test that the state_unit column is initialized during migration from schema 28
|
||||
hass = get_test_home_assistant()
|
||||
recorder_helper.async_initialize_recorder(hass)
|
||||
setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
|
||||
hass.start()
|
||||
wait_recording_done(hass)
|
||||
wait_recording_done(hass)
|
||||
|
||||
with session_scope(hass=hass) as session:
|
||||
tmp = session.query(recorder.db_schema.StatisticsMeta).all()
|
||||
assert len(tmp) == 2
|
||||
assert tmp[0].id == 1
|
||||
assert tmp[0].statistic_id == "test:total_energy_import_tariff_1"
|
||||
assert tmp[0].unit_of_measurement == "kWh"
|
||||
assert hasattr(tmp[0], "state_unit_of_measurement")
|
||||
assert tmp[0].state_unit_of_measurement == "kWh"
|
||||
assert tmp[1].id == 2
|
||||
assert tmp[1].statistic_id == "test:fossil_percentage"
|
||||
assert hasattr(tmp[1], "state_unit_of_measurement")
|
||||
assert tmp[1].state_unit_of_measurement == "%"
|
||||
assert tmp[1].state_unit_of_measurement == "%"
|
||||
|
||||
hass.stop()
|
||||
dt_util.DEFAULT_TIME_ZONE = ORIG_TZ
|
||||
|
||||
|
||||
def test_invalid_update(hass):
|
||||
"""Test that an invalid new version raises an exception."""
|
||||
with pytest.raises(ValueError):
|
||||
|
@ -159,7 +159,6 @@ def mock_sensor_statistics():
|
||||
"has_mean": True,
|
||||
"has_sum": False,
|
||||
"name": None,
|
||||
"state_unit_of_measurement": "dogs",
|
||||
"statistic_id": entity_id,
|
||||
"unit_of_measurement": "dogs",
|
||||
},
|
||||
@ -488,7 +487,6 @@ async def test_import_statistics(
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": source,
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": statistic_id,
|
||||
"unit_of_measurement": "kWh",
|
||||
}
|
||||
@ -530,7 +528,6 @@ async def test_import_statistics(
|
||||
"statistic_id": statistic_id,
|
||||
"name": "Total imported energy",
|
||||
"source": source,
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistics_unit_of_measurement": "kWh",
|
||||
"unit_class": "energy",
|
||||
}
|
||||
@ -544,7 +541,6 @@ async def test_import_statistics(
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": source,
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": statistic_id,
|
||||
"unit_of_measurement": "kWh",
|
||||
},
|
||||
@ -604,7 +600,7 @@ async def test_import_statistics(
|
||||
]
|
||||
}
|
||||
|
||||
# Update the previously inserted statistics + rename and change display unit
|
||||
# Update the previously inserted statistics + rename
|
||||
external_statistics = {
|
||||
"start": period1,
|
||||
"max": 1,
|
||||
@ -615,7 +611,6 @@ async def test_import_statistics(
|
||||
"sum": 5,
|
||||
}
|
||||
external_metadata["name"] = "Total imported energy renamed"
|
||||
external_metadata["state_unit_of_measurement"] = "MWh"
|
||||
import_fn(hass, external_metadata, (external_statistics,))
|
||||
await async_wait_recording_done(hass)
|
||||
statistic_ids = list_statistic_ids(hass)
|
||||
@ -626,7 +621,6 @@ async def test_import_statistics(
|
||||
"statistic_id": statistic_id,
|
||||
"name": "Total imported energy renamed",
|
||||
"source": source,
|
||||
"state_unit_of_measurement": "MWh",
|
||||
"statistics_unit_of_measurement": "kWh",
|
||||
"unit_class": "energy",
|
||||
}
|
||||
@ -640,7 +634,6 @@ async def test_import_statistics(
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy renamed",
|
||||
"source": source,
|
||||
"state_unit_of_measurement": "MWh",
|
||||
"statistic_id": statistic_id,
|
||||
"unit_of_measurement": "kWh",
|
||||
},
|
||||
@ -653,12 +646,12 @@ async def test_import_statistics(
|
||||
"statistic_id": statistic_id,
|
||||
"start": period1.isoformat(),
|
||||
"end": (period1 + timedelta(hours=1)).isoformat(),
|
||||
"max": approx(1.0 / 1000),
|
||||
"mean": approx(2.0 / 1000),
|
||||
"min": approx(3.0 / 1000),
|
||||
"max": approx(1.0),
|
||||
"mean": approx(2.0),
|
||||
"min": approx(3.0),
|
||||
"last_reset": last_reset_utc_str,
|
||||
"state": approx(4.0 / 1000),
|
||||
"sum": approx(5.0 / 1000),
|
||||
"state": approx(4.0),
|
||||
"sum": approx(5.0),
|
||||
},
|
||||
{
|
||||
"statistic_id": statistic_id,
|
||||
@ -668,13 +661,13 @@ async def test_import_statistics(
|
||||
"mean": None,
|
||||
"min": None,
|
||||
"last_reset": last_reset_utc_str,
|
||||
"state": approx(1.0 / 1000),
|
||||
"sum": approx(3.0 / 1000),
|
||||
"state": approx(1.0),
|
||||
"sum": approx(3.0),
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
# Adjust the statistics
|
||||
# Adjust the statistics in a different unit
|
||||
await client.send_json(
|
||||
{
|
||||
"id": 1,
|
||||
@ -696,12 +689,12 @@ async def test_import_statistics(
|
||||
"statistic_id": statistic_id,
|
||||
"start": period1.isoformat(),
|
||||
"end": (period1 + timedelta(hours=1)).isoformat(),
|
||||
"max": approx(1.0 / 1000),
|
||||
"mean": approx(2.0 / 1000),
|
||||
"min": approx(3.0 / 1000),
|
||||
"max": approx(1.0),
|
||||
"mean": approx(2.0),
|
||||
"min": approx(3.0),
|
||||
"last_reset": last_reset_utc_str,
|
||||
"state": approx(4.0 / 1000),
|
||||
"sum": approx(5.0 / 1000),
|
||||
"state": approx(4.0),
|
||||
"sum": approx(5.0),
|
||||
},
|
||||
{
|
||||
"statistic_id": statistic_id,
|
||||
@ -711,8 +704,8 @@ async def test_import_statistics(
|
||||
"mean": None,
|
||||
"min": None,
|
||||
"last_reset": last_reset_utc_str,
|
||||
"state": approx(1.0 / 1000),
|
||||
"sum": approx(1000 + 3.0 / 1000),
|
||||
"state": approx(1.0),
|
||||
"sum": approx(1000 * 1000 + 3.0),
|
||||
},
|
||||
]
|
||||
}
|
||||
@ -741,7 +734,6 @@ def test_external_statistics_errors(hass_recorder, caplog):
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": "test",
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": "test:total_energy_import",
|
||||
"unit_of_measurement": "kWh",
|
||||
}
|
||||
@ -805,16 +797,6 @@ def test_external_statistics_errors(hass_recorder, caplog):
|
||||
assert list_statistic_ids(hass) == []
|
||||
assert get_metadata(hass, statistic_ids=("test:total_energy_import",)) == {}
|
||||
|
||||
# Attempt to insert statistics with an invalid unit combination
|
||||
external_metadata = {**_external_metadata, "state_unit_of_measurement": "cats"}
|
||||
external_statistics = {**_external_statistics}
|
||||
with pytest.raises(HomeAssistantError):
|
||||
async_add_external_statistics(hass, external_metadata, (external_statistics,))
|
||||
wait_recording_done(hass)
|
||||
assert statistics_during_period(hass, zero, period="hour") == {}
|
||||
assert list_statistic_ids(hass) == []
|
||||
assert get_metadata(hass, statistic_ids=("test:total_energy_import",)) == {}
|
||||
|
||||
|
||||
def test_import_statistics_errors(hass_recorder, caplog):
|
||||
"""Test validation of imported statistics."""
|
||||
@ -839,7 +821,6 @@ def test_import_statistics_errors(hass_recorder, caplog):
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": "recorder",
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": "sensor.total_energy_import",
|
||||
"unit_of_measurement": "kWh",
|
||||
}
|
||||
@ -903,16 +884,6 @@ def test_import_statistics_errors(hass_recorder, caplog):
|
||||
assert list_statistic_ids(hass) == []
|
||||
assert get_metadata(hass, statistic_ids=("sensor.total_energy_import",)) == {}
|
||||
|
||||
# Attempt to insert statistics with an invalid unit combination
|
||||
external_metadata = {**_external_metadata, "state_unit_of_measurement": "cats"}
|
||||
external_statistics = {**_external_statistics}
|
||||
with pytest.raises(HomeAssistantError):
|
||||
async_import_statistics(hass, external_metadata, (external_statistics,))
|
||||
wait_recording_done(hass)
|
||||
assert statistics_during_period(hass, zero, period="hour") == {}
|
||||
assert list_statistic_ids(hass) == []
|
||||
assert get_metadata(hass, statistic_ids=("sensor.total_energy_import",)) == {}
|
||||
|
||||
|
||||
@pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"])
|
||||
@pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
|
||||
@ -962,7 +933,6 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": "test",
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": "test:total_energy_import",
|
||||
"unit_of_measurement": "kWh",
|
||||
}
|
||||
@ -1081,7 +1051,6 @@ def test_duplicate_statistics_handle_integrity_error(hass_recorder, caplog):
|
||||
"has_sum": True,
|
||||
"name": "Total imported energy",
|
||||
"source": "test",
|
||||
"state_unit_of_measurement": "kWh",
|
||||
"statistic_id": "test:total_energy_import_tariff_1",
|
||||
"unit_of_measurement": "kWh",
|
||||
}
|
||||
|
@ -651,7 +651,6 @@ async def test_list_statistic_ids(
|
||||
"has_sum": has_sum,
|
||||
"name": None,
|
||||
"source": "recorder",
|
||||
"state_unit_of_measurement": display_unit,
|
||||
"statistics_unit_of_measurement": statistics_unit,
|
||||
"unit_class": unit_class,
|
||||
}
|
||||
@ -673,7 +672,6 @@ async def test_list_statistic_ids(
|
||||
"has_sum": has_sum,
|
||||
"name": None,
|
||||
"source": "recorder",
|
||||
"state_unit_of_measurement": display_unit,
|
||||
"statistics_unit_of_measurement": statistics_unit,
|
||||
"unit_class": unit_class,
|
||||
}
|
||||
@ -698,7 +696,6 @@ async def test_list_statistic_ids(
|
||||
"has_sum": has_sum,
|
||||
"name": None,
|
||||
"source": "recorder",
|
||||
"state_unit_of_measurement": display_unit,
|
||||
"statistics_unit_of_measurement": statistics_unit,
|
||||
"unit_class": unit_class,
|
||||
}
|
||||
@ -719,7 +716,6 @@ async def test_list_statistic_ids(
|
||||
"has_sum": has_sum,
|
||||
"name": None,
|
||||
"source": "recorder",
|
||||
"state_unit_of_measurement": display_unit,
|
||||
"statistics_unit_of_measurement": statistics_unit,
|
||||
"unit_class": unit_class,
|
||||
}
|
||||
@ -907,7 +903,6 @@ async def test_update_statistics_metadata(
|
||||
"has_sum": False,
|
||||
"name": None,
|
||||
"source": "recorder",
|
||||
"state_unit_of_measurement": "kW",
|
||||
"statistics_unit_of_measurement": "kW",
|
||||
"unit_class": None,
|
||||
}
|
||||
@ -935,7 +930,6 @@ async def test_update_statistics_metadata(
|
||||
"has_sum": False,
|
||||
"name": None,
|
||||
"source": "recorder",
|
||||
"state_unit_of_measurement": "kW",
|
||||
"statistics_unit_of_measurement": new_unit,
|
||||
"unit_class": new_unit_class,
|
||||
}
|
||||
@ -999,7 +993,6 @@ async def test_change_statistics_unit(hass, hass_ws_client, recorder_mock):
|
||||
"has_sum": False,
|
||||
"name": None,
|
||||
"source": "recorder",
|
||||
"state_unit_of_measurement": "kW",
|
||||
"statistics_unit_of_measurement": "kW",
|
||||
"unit_class": None,
|
||||
}
|
||||
@ -1055,7 +1048,6 @@ async def test_change_statistics_unit(hass, hass_ws_client, recorder_mock):
|
||||
"has_sum": False,
|
||||
"name": None,
|
||||
"source": "recorder",
|
||||
"state_unit_of_measurement": "kW",
|
||||
"statistics_unit_of_measurement": "W",
|
||||
"unit_class": "power",
|
||||
}
|
||||
@ -1108,7 +1100,6 @@ async def test_change_statistics_unit_errors(
|
||||
"has_sum": False,
|
||||
"name": None,
|
||||
"source": "recorder",
|
||||
"state_unit_of_measurement": "kW",
|
||||
"statistics_unit_of_measurement": "kW",
|
||||
"unit_class": None,
|
||||
}
|
||||
@ -1461,7 +1452,6 @@ async def test_get_statistics_metadata(
"has_sum": has_sum,
"name": "Total imported energy",
"source": "test",
-"state_unit_of_measurement": unit,
"statistic_id": "test:total_gas",
"unit_of_measurement": unit,
}
@ -1487,7 +1477,6 @@ async def test_get_statistics_metadata(
"has_sum": has_sum,
"name": "Total imported energy",
"source": "test",
-"state_unit_of_measurement": unit,
"statistics_unit_of_measurement": unit,
"unit_class": unit_class,
}
@ -1515,7 +1504,6 @@ async def test_get_statistics_metadata(
"has_sum": has_sum,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": attributes["unit_of_measurement"],
"statistics_unit_of_measurement": unit,
"unit_class": unit_class,
}
@ -1543,7 +1531,6 @@ async def test_get_statistics_metadata(
"has_sum": has_sum,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": attributes["unit_of_measurement"],
"statistics_unit_of_measurement": unit,
"unit_class": unit_class,
}
@ -1640,7 +1627,6 @@ async def test_import_statistics(
"statistic_id": statistic_id,
"name": "Total imported energy",
"source": source,
-"state_unit_of_measurement": "kWh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
}
@ -1654,7 +1640,6 @@ async def test_import_statistics(
"has_sum": True,
"name": "Total imported energy",
"source": source,
-"state_unit_of_measurement": "kWh",
"statistic_id": statistic_id,
"unit_of_measurement": "kWh",
},
@ -1869,7 +1854,6 @@ async def test_adjust_sum_statistics_energy(
"statistic_id": statistic_id,
"name": "Total imported energy",
"source": source,
-"state_unit_of_measurement": "kWh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
}
@ -1883,7 +1867,6 @@ async def test_adjust_sum_statistics_energy(
"has_sum": True,
"name": "Total imported energy",
"source": source,
-"state_unit_of_measurement": "kWh",
"statistic_id": statistic_id,
"unit_of_measurement": "kWh",
},
@ -2067,7 +2050,6 @@ async def test_adjust_sum_statistics_gas(
"statistic_id": statistic_id,
"name": "Total imported energy",
"source": source,
-"state_unit_of_measurement": "m³",
"statistics_unit_of_measurement": "m³",
"unit_class": "volume",
}
@ -2081,7 +2063,6 @@ async def test_adjust_sum_statistics_gas(
"has_sum": True,
"name": "Total imported energy",
"source": source,
-"state_unit_of_measurement": "m³",
"statistic_id": statistic_id,
"unit_of_measurement": "m³",
},
@ -2281,7 +2262,6 @@ async def test_adjust_sum_statistics_errors(
"statistic_id": statistic_id,
"name": "Total imported energy",
"source": source,
-"state_unit_of_measurement": state_unit,
"statistics_unit_of_measurement": statistic_unit,
"unit_class": unit_class,
}
@ -2295,7 +2275,6 @@ async def test_adjust_sum_statistics_errors(
"has_sum": True,
"name": "Total imported energy",
"source": source,
-"state_unit_of_measurement": state_unit,
"statistic_id": statistic_id,
"unit_of_measurement": statistic_unit,
},

@ -140,7 +140,6 @@ def test_compile_hourly_statistics(
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@ -215,7 +214,6 @@ def test_compile_hourly_statistics_purged_state_changes(
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@ -285,7 +283,6 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": "°C",
"statistics_unit_of_measurement": "°C",
"unit_class": "temperature",
},
@ -295,7 +292,6 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": "°C",
"statistics_unit_of_measurement": "°C",
"unit_class": "temperature",
},
@ -305,7 +301,6 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": "°C",
"statistics_unit_of_measurement": "°C",
"unit_class": "temperature",
},
@ -440,7 +435,6 @@ async def test_compile_hourly_sum_statistics_amount(
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@ -633,7 +627,6 @@ def test_compile_hourly_sum_statistics_amount_reset_every_state_change(
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@ -734,7 +727,6 @@ def test_compile_hourly_sum_statistics_amount_invalid_last_reset(
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@ -819,7 +811,6 @@ def test_compile_hourly_sum_statistics_nan_inf_state(
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@ -933,7 +924,6 @@ def test_compile_hourly_sum_statistics_negative_state(
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistic_id": entity_id,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
@ -1022,7 +1012,6 @@ def test_compile_hourly_sum_statistics_total_no_reset(
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@ -1125,7 +1114,6 @@ def test_compile_hourly_sum_statistics_total_increasing(
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@ -1239,7 +1227,6 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip(
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@ -1334,7 +1321,6 @@ def test_compile_hourly_energy_statistics_unsupported(hass_recorder, caplog):
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": "kWh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
}
@ -1427,7 +1413,6 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": "kWh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
},
@ -1437,7 +1422,6 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": "kWh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
},
@ -1447,7 +1431,6 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": "Wh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
},
@ -1811,7 +1794,6 @@ def test_list_statistic_ids(
"has_sum": statistic_type == "sum",
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
},
@ -1826,7 +1808,6 @@ def test_list_statistic_ids(
"has_sum": statistic_type == "sum",
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
},
@ -1917,7 +1898,6 @@ def test_compile_hourly_statistics_changing_units_1(
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
},
@ -1953,7 +1933,6 @@ def test_compile_hourly_statistics_changing_units_1(
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
},
@ -2029,7 +2008,6 @@ def test_compile_hourly_statistics_changing_units_2(
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": "cats",
"statistics_unit_of_measurement": "cats",
"unit_class": unit_class,
},
@ -2095,7 +2073,6 @@ def test_compile_hourly_statistics_changing_units_3(
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
},
@ -2131,7 +2108,6 @@ def test_compile_hourly_statistics_changing_units_3(
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
},
@ -2197,7 +2173,6 @@ def test_compile_hourly_statistics_changing_device_class_1(
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": state_unit,
"statistics_unit_of_measurement": state_unit,
"unit_class": unit_class,
},
@ -2243,7 +2218,6 @@ def test_compile_hourly_statistics_changing_device_class_1(
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": state_unit,
"statistics_unit_of_measurement": state_unit,
"unit_class": unit_class,
},
@ -2306,7 +2280,6 @@ def test_compile_hourly_statistics_changing_device_class_1(
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": state_unit,
"statistics_unit_of_measurement": state_unit,
"unit_class": unit_class,
},
@ -2386,7 +2359,6 @@ def test_compile_hourly_statistics_changing_device_class_2(
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistic_unit,
"unit_class": unit_class,
},
@ -2436,7 +2408,6 @@ def test_compile_hourly_statistics_changing_device_class_2(
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistic_unit,
"unit_class": unit_class,
},
@ -2506,7 +2477,6 @@ def test_compile_hourly_statistics_changing_statistics(
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": None,
"statistics_unit_of_measurement": None,
"unit_class": None,
},
@ -2520,7 +2490,6 @@ def test_compile_hourly_statistics_changing_statistics(
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": None,
"statistic_id": "sensor.test1",
"unit_of_measurement": None,
},
@ -2543,7 +2512,6 @@ def test_compile_hourly_statistics_changing_statistics(
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": None,
"statistics_unit_of_measurement": None,
"unit_class": None,
},
@ -2557,7 +2525,6 @@ def test_compile_hourly_statistics_changing_statistics(
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": None,
"statistic_id": "sensor.test1",
"unit_of_measurement": None,
},
@ -2738,7 +2705,6 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": "%",
"statistics_unit_of_measurement": "%",
"unit_class": None,
},
@ -2748,7 +2714,6 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": "%",
"statistics_unit_of_measurement": "%",
"unit_class": None,
},
@ -2758,7 +2723,6 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
"has_sum": False,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": "%",
"statistics_unit_of_measurement": "%",
"unit_class": None,
},
@ -2768,7 +2732,6 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
"has_sum": True,
"name": None,
"source": "recorder",
-"state_unit_of_measurement": "EUR",
"statistics_unit_of_measurement": "EUR",
"unit_class": None,
},
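For readers skimming the test hunks above: every expected-result dict drops its "state_unit_of_measurement" entry while keeping "statistics_unit_of_measurement" (or "unit_of_measurement" in the raw metadata form). Below is a minimal, illustrative sketch of the asserted shape after this change; it is not code from the commit, the function name is hypothetical, and the kWh/energy values are just one example taken from the hunks.

# Illustrative sketch only (not part of this commit): the shape of a
# statistics-metadata entry as the updated tests expect it, with no
# "state_unit_of_measurement" key. Values mirror the kWh/energy example
# in the hunks above; the helper name is made up.

def example_expected_metadata() -> dict:
    """Return an example expected statistics-metadata payload."""
    return {
        "has_sum": True,
        "name": None,
        "source": "recorder",
        "statistics_unit_of_measurement": "kWh",
        "unit_class": "energy",
    }


if __name__ == "__main__":
    entry = example_expected_metadata()
    # The column removed from the statistics_meta table no longer appears
    # in the expected payloads either.
    assert "state_unit_of_measurement" not in entry
    assert entry["statistics_unit_of_measurement"] == "kWh"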