Add circular mean statistics and sensor state class MEASUREMENT_ANGLE (#138453)
* Add circular mean statistics
* fixes
* Add has_circular_mean and fix tests
* Fix mypy
* Rename to MEASUREMENT_ANGLE
* Fix kitchen_sink tests
* Fix sensor tests
* for testing only
* Revert ws command change
* Apply suggestions
* test only
* add custom handling for postgres
* fix recursion limit
* Check if column is already available
* Set default false and not nullable for has_circular_mean
* Proper fix to be backwards compatible
* Fix value is None
* Align with schema
* Remove has_circular_mean from test schemas as it's not required anymore
* fix wrong column type
* Use correct variable to reduce stats
* Add guard that the uom matches a valid one from the state class
* Add some tests
* Fix tests again
* Use mean_type in StatisticsMeta to differentiate between mean type algorithms
* Fix leftovers
* Fix kitchen_sink tests
* Fix postgres
* Add circular mean test
* Add mean_type_changed stats issue
* Align the attributes with unit_changed
* Fix mean_type_change stats issue
* Add missing sensor recorder tests
* Add test_statistic_during_period_circular_mean
* Add mean_weight
* Add test_statistic_during_period_hole_circular_mean
* Use separate migration step to null has_mean
* Typo ARITHMETIC
* Implement requested changes
* Implement requested changes
* Split into #141444
* Add StatisticMeanType.NONE and forbid that mean_type can be None
* Fix mean_type
* Implement requested changes
* Small leftover of latest StatisticMeanType changes
parent 4a6d2c91da
commit e3f2f30395
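For orientation before the diff: angular quantities such as wind direction cannot use an arithmetic mean (350° and 10° would average to 180°, pointing the wrong way), so the new MEASUREMENT_ANGLE state class stores a circular mean instead. A minimal standalone sketch of the underlying math, mirroring the circular_mean/weighted_circular_mean helpers this commit adds to recorder/statistics.py:

    import math

    def circular_mean(degrees: list[float]) -> float:
        """Circular mean of angles in degrees, normalized to [0, 360)."""
        sin_sum = sum(math.sin(math.radians(d)) for d in degrees)
        cos_sum = sum(math.cos(math.radians(d)) for d in degrees)
        return math.degrees(math.atan2(sin_sum, cos_sum)) % 360

    # The arithmetic mean of 350 and 10 is 180; the circular mean is north.
    assert round(circular_mean([350.0, 10.0])) % 360 == 0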
@@ -8,7 +8,11 @@ from aiodukeenergy import DukeEnergy
 from aiohttp import ClientError
 
 from homeassistant.components.recorder import get_instance
-from homeassistant.components.recorder.models import StatisticData, StatisticMetaData
+from homeassistant.components.recorder.models import (
+    StatisticData,
+    StatisticMeanType,
+    StatisticMetaData,
+)
 from homeassistant.components.recorder.statistics import (
     async_add_external_statistics,
     get_last_statistics,
@@ -137,7 +141,7 @@ class DukeEnergyCoordinator(DataUpdateCoordinator[None]):
                 f"Duke Energy {meter['serviceType'].capitalize()} {serial_number}"
             )
             consumption_metadata = StatisticMetaData(
-                has_mean=False,
+                mean_type=StatisticMeanType.NONE,
                 has_sum=True,
                 name=f"{name_prefix} Consumption",
                 source=DOMAIN,
@@ -7,7 +7,11 @@ from typing import TYPE_CHECKING, cast
 
 from elvia import Elvia, error as ElviaError
 
-from homeassistant.components.recorder.models import StatisticData, StatisticMetaData
+from homeassistant.components.recorder.models import (
+    StatisticData,
+    StatisticMeanType,
+    StatisticMetaData,
+)
 from homeassistant.components.recorder.statistics import (
     async_add_external_statistics,
     get_last_statistics,
@@ -144,7 +148,7 @@ class ElviaImporter:
         async_add_external_statistics(
             hass=self.hass,
             metadata=StatisticMetaData(
-                has_mean=False,
+                mean_type=StatisticMeanType.NONE,
                 has_sum=True,
                 name=f"{self.metering_point_id} Consumption",
                 source=DOMAIN,
@@ -8,6 +8,7 @@ import datetime
 from enum import StrEnum
 import logging
 
+from homeassistant.components.recorder.models import StatisticMeanType
 from homeassistant.components.recorder.models.statistics import (
     StatisticData,
     StatisticMetaData,
@@ -270,7 +271,7 @@ class IstaSensor(CoordinatorEntity[IstaCoordinator], SensorEntity):
             ]
 
             metadata: StatisticMetaData = {
-                "has_mean": False,
+                "mean_type": StatisticMeanType.NONE,
                 "has_sum": True,
                 "name": f"{self.device_entry.name} {self.name}",
                 "source": DOMAIN,
@@ -12,14 +12,24 @@ from random import random
 import voluptuous as vol
 
 from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, get_instance
-from homeassistant.components.recorder.models import StatisticData, StatisticMetaData
+from homeassistant.components.recorder.models import (
+    StatisticData,
+    StatisticMeanType,
+    StatisticMetaData,
+)
 from homeassistant.components.recorder.statistics import (
     async_add_external_statistics,
     async_import_statistics,
     get_last_statistics,
 )
 from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
-from homeassistant.const import Platform, UnitOfEnergy, UnitOfTemperature, UnitOfVolume
+from homeassistant.const import (
+    DEGREE,
+    Platform,
+    UnitOfEnergy,
+    UnitOfTemperature,
+    UnitOfVolume,
+)
 from homeassistant.core import HomeAssistant, ServiceCall, callback
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
@@ -72,6 +82,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
 
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set the config entry up."""
+    if "recorder" in hass.config.components:
+        # Insert stats for mean_type_changed issue
+        await _insert_wrong_wind_direction_statistics(hass)
+
     # Set up demo platforms with config entry
     await hass.config_entries.async_forward_entry_setups(
         entry, COMPONENTS_WITH_DEMO_PLATFORM
@@ -233,7 +247,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
         "name": "Outdoor temperature",
         "statistic_id": f"{DOMAIN}:temperature_outdoor",
         "unit_of_measurement": UnitOfTemperature.CELSIUS,
-        "has_mean": True,
+        "mean_type": StatisticMeanType.ARITHMETIC,
         "has_sum": False,
     }
     statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1)
@@ -246,7 +260,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
         "name": "Energy consumption 1",
         "statistic_id": f"{DOMAIN}:energy_consumption_kwh",
         "unit_of_measurement": UnitOfEnergy.KILO_WATT_HOUR,
-        "has_mean": False,
+        "mean_type": StatisticMeanType.NONE,
         "has_sum": True,
     }
     await _insert_sum_statistics(hass, metadata, yesterday_midnight, today_midnight, 1)
@@ -258,7 +272,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
         "name": "Energy consumption 2",
         "statistic_id": f"{DOMAIN}:energy_consumption_mwh",
         "unit_of_measurement": UnitOfEnergy.MEGA_WATT_HOUR,
-        "has_mean": False,
+        "mean_type": StatisticMeanType.NONE,
         "has_sum": True,
     }
     await _insert_sum_statistics(
@@ -272,7 +286,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
         "name": "Gas consumption 1",
         "statistic_id": f"{DOMAIN}:gas_consumption_m3",
         "unit_of_measurement": UnitOfVolume.CUBIC_METERS,
-        "has_mean": False,
+        "mean_type": StatisticMeanType.NONE,
         "has_sum": True,
     }
     await _insert_sum_statistics(
@@ -286,7 +300,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
         "name": "Gas consumption 2",
         "statistic_id": f"{DOMAIN}:gas_consumption_ft3",
         "unit_of_measurement": UnitOfVolume.CUBIC_FEET,
-        "has_mean": False,
+        "mean_type": StatisticMeanType.NONE,
         "has_sum": True,
     }
     await _insert_sum_statistics(hass, metadata, yesterday_midnight, today_midnight, 15)
@@ -298,7 +312,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
         "name": None,
         "statistic_id": "sensor.statistics_issues_issue_1",
         "unit_of_measurement": UnitOfVolume.CUBIC_METERS,
-        "has_mean": True,
+        "mean_type": StatisticMeanType.ARITHMETIC,
         "has_sum": False,
     }
     statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1)
@@ -310,7 +324,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
         "name": None,
         "statistic_id": "sensor.statistics_issues_issue_2",
         "unit_of_measurement": "cats",
-        "has_mean": True,
+        "mean_type": StatisticMeanType.ARITHMETIC,
         "has_sum": False,
     }
     statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1)
@@ -322,7 +336,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
         "name": None,
         "statistic_id": "sensor.statistics_issues_issue_3",
         "unit_of_measurement": UnitOfVolume.CUBIC_METERS,
-        "has_mean": True,
+        "mean_type": StatisticMeanType.ARITHMETIC,
         "has_sum": False,
     }
     statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1)
@@ -334,8 +348,28 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
         "name": None,
         "statistic_id": "sensor.statistics_issues_issue_4",
         "unit_of_measurement": UnitOfVolume.CUBIC_METERS,
-        "has_mean": True,
+        "mean_type": StatisticMeanType.ARITHMETIC,
         "has_sum": False,
     }
     statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1)
     async_import_statistics(hass, metadata, statistics)
+
+
+async def _insert_wrong_wind_direction_statistics(hass: HomeAssistant) -> None:
+    """Insert some fake wind direction statistics."""
+    now = dt_util.now()
+    yesterday = now - datetime.timedelta(days=1)
+    yesterday_midnight = yesterday.replace(hour=0, minute=0, second=0, microsecond=0)
+    today_midnight = yesterday_midnight + datetime.timedelta(days=1)
+
+    # Add some statistics required to raise the mean_type_changed issue later
+    metadata: StatisticMetaData = {
+        "source": RECORDER_DOMAIN,
+        "name": None,
+        "statistic_id": "sensor.statistics_issues_issue_5",
+        "unit_of_measurement": DEGREE,
+        "mean_type": StatisticMeanType.ARITHMETIC,
+        "has_sum": False,
+    }
+    statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 0, 360)
+    async_import_statistics(hass, metadata, statistics)
@@ -8,7 +8,7 @@ from homeassistant.components.sensor import (
     SensorStateClass,
 )
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import UnitOfPower
+from homeassistant.const import DEGREE, UnitOfPower
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -87,6 +87,16 @@ async def async_setup_entry(
                 state_class=None,
                 unit_of_measurement=UnitOfPower.WATT,
             ),
+            DemoSensor(
+                device_unique_id="statistics_issues",
+                unique_id="statistics_issue_5",
+                device_name="Statistics issues",
+                entity_name="Issue 5",
+                state=100,
+                device_class=SensorDeviceClass.WIND_DIRECTION,
+                state_class=SensorStateClass.MEASUREMENT_ANGLE,
+                unit_of_measurement=DEGREE,
+            ),
         ]
     )
 
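For integration authors, the demo entity above shows the full opt-in: device class WIND_DIRECTION, state class MEASUREMENT_ANGLE, and degrees as the unit (the commit adds a guard that the unit must match the state class). A rough sketch of a hypothetical sensor entity using the new state class (WindDirectionSensor and its value are illustrative, not from this commit):

    from homeassistant.components.sensor import (
        SensorDeviceClass,
        SensorEntity,
        SensorStateClass,
    )
    from homeassistant.const import DEGREE


    class WindDirectionSensor(SensorEntity):
        """Illustrative wind direction sensor."""

        _attr_device_class = SensorDeviceClass.WIND_DIRECTION
        _attr_state_class = SensorStateClass.MEASUREMENT_ANGLE
        _attr_native_unit_of_measurement = DEGREE  # must be degrees for this state class

        @property
        def native_value(self) -> float:
            return 270.0  # placeholder reading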
@@ -16,7 +16,11 @@ from opower import (
 from opower.exceptions import ApiException, CannotConnect, InvalidAuth
 
 from homeassistant.components.recorder import get_instance
-from homeassistant.components.recorder.models import StatisticData, StatisticMetaData
+from homeassistant.components.recorder.models import (
+    StatisticData,
+    StatisticMeanType,
+    StatisticMetaData,
+)
 from homeassistant.components.recorder.statistics import (
     async_add_external_statistics,
     get_last_statistics,
@@ -201,7 +205,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
                 f"{account.meter_type.name.lower()} {account.utility_account_id}"
             )
             cost_metadata = StatisticMetaData(
-                has_mean=False,
+                mean_type=StatisticMeanType.NONE,
                 has_sum=True,
                 name=f"{name_prefix} cost",
                 source=DOMAIN,
@@ -209,7 +213,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
                 unit_of_measurement=None,
             )
             consumption_metadata = StatisticMetaData(
-                has_mean=False,
+                mean_type=StatisticMeanType.NONE,
                 has_sum=True,
                 name=f"{name_prefix} consumption",
                 source=DOMAIN,
@@ -54,6 +54,7 @@ CONTEXT_ID_AS_BINARY_SCHEMA_VERSION = 36
 EVENT_TYPE_IDS_SCHEMA_VERSION = 37
 STATES_META_SCHEMA_VERSION = 38
 LAST_REPORTED_SCHEMA_VERSION = 43
+CIRCULAR_MEAN_SCHEMA_VERSION = 49
 
 LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION = 28
 LEGACY_STATES_EVENT_FOREIGN_KEYS_FIXED_SCHEMA_VERSION = 43
@@ -79,7 +79,13 @@ from .db_schema import (
     StatisticsShortTerm,
 )
 from .executor import DBInterruptibleThreadPoolExecutor
-from .models import DatabaseEngine, StatisticData, StatisticMetaData, UnsupportedDialect
+from .models import (
+    DatabaseEngine,
+    StatisticData,
+    StatisticMeanType,
+    StatisticMetaData,
+    UnsupportedDialect,
+)
 from .pool import POOL_SIZE, MutexPool, RecorderPool
 from .table_managers.event_data import EventDataManager
 from .table_managers.event_types import EventTypeManager
@@ -611,6 +617,17 @@ class Recorder(threading.Thread):
         table: type[Statistics | StatisticsShortTerm],
     ) -> None:
         """Schedule import of statistics."""
+        if "mean_type" not in metadata:
+            # Backwards compatibility for old metadata format
+            # Can be removed after 2026.4
+            metadata["mean_type"] = (  # type: ignore[unreachable]
+                StatisticMeanType.ARITHMETIC
+                if metadata.get("has_mean")
+                else StatisticMeanType.NONE
+            )
+            # Remove deprecated has_mean as it's not needed anymore in core
+            metadata.pop("has_mean", None)
+
         self.queue_task(ImportStatisticsTask(metadata, stats, table))
 
     @callback
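The shim above keeps external callers working: metadata that still carries only the deprecated has_mean flag is upgraded to mean_type before the import task is queued. Traced by hand with an illustrative dict:

    from homeassistant.components.recorder.models import StatisticMeanType

    legacy_metadata = {"has_mean": True, "has_sum": False}  # old-style caller

    if "mean_type" not in legacy_metadata:
        legacy_metadata["mean_type"] = (
            StatisticMeanType.ARITHMETIC
            if legacy_metadata.get("has_mean")
            else StatisticMeanType.NONE
        )
        legacy_metadata.pop("has_mean", None)

    assert legacy_metadata == {
        "has_sum": False,
        "mean_type": StatisticMeanType.ARITHMETIC,
    }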
@@ -58,6 +58,7 @@ from .const import ALL_DOMAIN_EXCLUDE_ATTRS, SupportedDialect
 from .models import (
     StatisticData,
     StatisticDataTimestamp,
+    StatisticMeanType,
     StatisticMetaData,
     bytes_to_ulid_or_none,
     bytes_to_uuid_hex_or_none,
@@ -77,7 +78,7 @@ class LegacyBase(DeclarativeBase):
     """Base class for tables, used for schema migration."""
 
 
-SCHEMA_VERSION = 48
+SCHEMA_VERSION = 50
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -719,6 +720,7 @@ class StatisticsBase:
     start: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
     start_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True)
     mean: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
+    mean_weight: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
     min: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
     max: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
     last_reset: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
@@ -740,6 +742,7 @@ class StatisticsBase:
             start=None,
             start_ts=stats["start"].timestamp(),
             mean=stats.get("mean"),
+            mean_weight=stats.get("mean_weight"),
             min=stats.get("min"),
             max=stats.get("max"),
             last_reset=None,
@@ -763,6 +766,7 @@ class StatisticsBase:
             start=None,
             start_ts=stats["start_ts"],
             mean=stats.get("mean"),
+            mean_weight=stats.get("mean_weight"),
             min=stats.get("min"),
             max=stats.get("max"),
             last_reset=None,
@@ -848,6 +852,9 @@ class _StatisticsMeta:
     has_mean: Mapped[bool | None] = mapped_column(Boolean)
     has_sum: Mapped[bool | None] = mapped_column(Boolean)
     name: Mapped[str | None] = mapped_column(String(255))
+    mean_type: Mapped[StatisticMeanType] = mapped_column(
+        SmallInteger, nullable=False, default=StatisticMeanType.NONE.value
+    )  # See StatisticMeanType
 
     @staticmethod
     def from_meta(meta: StatisticMetaData) -> StatisticsMeta:
@@ -81,7 +81,7 @@ from .db_schema import (
     StatisticsRuns,
     StatisticsShortTerm,
 )
-from .models import process_timestamp
+from .models import StatisticMeanType, process_timestamp
 from .models.time import datetime_to_timestamp_or_none
 from .queries import (
     batch_cleanup_entity_ids,
@@ -144,24 +144,32 @@ class _ColumnTypesForDialect:
     big_int_type: str
     timestamp_type: str
     context_bin_type: str
+    small_int_type: str
+    double_type: str
 
 
 _MYSQL_COLUMN_TYPES = _ColumnTypesForDialect(
     big_int_type="INTEGER(20)",
     timestamp_type=DOUBLE_PRECISION_TYPE_SQL,
     context_bin_type=f"BLOB({CONTEXT_ID_BIN_MAX_LENGTH})",
+    small_int_type="SMALLINT",
+    double_type=DOUBLE_PRECISION_TYPE_SQL,
 )
 
 _POSTGRESQL_COLUMN_TYPES = _ColumnTypesForDialect(
     big_int_type="INTEGER",
     timestamp_type=DOUBLE_PRECISION_TYPE_SQL,
     context_bin_type="BYTEA",
+    small_int_type="SMALLINT",
+    double_type=DOUBLE_PRECISION_TYPE_SQL,
 )
 
 _SQLITE_COLUMN_TYPES = _ColumnTypesForDialect(
     big_int_type="INTEGER",
     timestamp_type="FLOAT",
     context_bin_type="BLOB",
+    small_int_type="INTEGER",
+    double_type="FLOAT",
 )
 
 _COLUMN_TYPES_FOR_DIALECT: dict[SupportedDialect | None, _ColumnTypesForDialect] = {
@@ -1993,6 +2001,42 @@ class _SchemaVersion48Migrator(_SchemaVersionMigrator, target_version=48):
         _migrate_columns_to_timestamp(self.instance, self.session_maker, self.engine)
 
 
+class _SchemaVersion49Migrator(_SchemaVersionMigrator, target_version=49):
+    def _apply_update(self) -> None:
+        """Version specific update method."""
+        _add_columns(
+            self.session_maker,
+            "statistics_meta",
+            [
+                f"mean_type {self.column_types.small_int_type} NOT NULL DEFAULT {StatisticMeanType.NONE.value}"
+            ],
+        )
+
+        for table in ("statistics", "statistics_short_term"):
+            _add_columns(
+                self.session_maker,
+                table,
+                [f"mean_weight {self.column_types.double_type}"],
+            )
+
+        with session_scope(session=self.session_maker()) as session:
+            connection = session.connection()
+            connection.execute(
+                text(
+                    "UPDATE statistics_meta SET mean_type=:mean_type WHERE has_mean=true"
+                ),
+                {"mean_type": StatisticMeanType.ARITHMETIC.value},
+            )
+
+
+class _SchemaVersion50Migrator(_SchemaVersionMigrator, target_version=50):
+    def _apply_update(self) -> None:
+        """Version specific update method."""
+        with session_scope(session=self.session_maker()) as session:
+            connection = session.connection()
+            connection.execute(text("UPDATE statistics_meta SET has_mean=NULL"))
+
+
 def _migrate_statistics_columns_to_timestamp_removing_duplicates(
     hass: HomeAssistant,
     instance: Recorder,
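The upgrade is deliberately split: version 49 adds the columns and backfills mean_type from has_mean, and only version 50 clears has_mean, so an upgrade interrupted between the two steps still has the legacy column intact. Roughly the statements issued on SQLite (an approximation assuming the SQLite column types above, not the exact emitted DDL):

    # Schema version 49: add columns, then backfill mean_type (1 = ARITHMETIC).
    V49_STATEMENTS = [
        "ALTER TABLE statistics_meta ADD COLUMN mean_type INTEGER NOT NULL DEFAULT 0",
        "ALTER TABLE statistics ADD COLUMN mean_weight FLOAT",
        "ALTER TABLE statistics_short_term ADD COLUMN mean_weight FLOAT",
        "UPDATE statistics_meta SET mean_type=1 WHERE has_mean=true",
    ]

    # Schema version 50: clear the deprecated flag in a separate step.
    V50_STATEMENTS = ["UPDATE statistics_meta SET has_mean=NULL"]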
@@ -17,6 +17,7 @@ from .statistics import (
     RollingWindowStatisticPeriod,
     StatisticData,
     StatisticDataTimestamp,
+    StatisticMeanType,
     StatisticMetaData,
     StatisticPeriod,
     StatisticResult,
@@ -37,6 +38,7 @@ __all__ = [
     "RollingWindowStatisticPeriod",
     "StatisticData",
     "StatisticDataTimestamp",
+    "StatisticMeanType",
     "StatisticMetaData",
     "StatisticPeriod",
     "StatisticResult",
@@ -3,7 +3,8 @@
 from __future__ import annotations
 
 from datetime import datetime, timedelta
-from typing import Literal, TypedDict
+from enum import IntEnum
+from typing import Literal, NotRequired, TypedDict
 
 
 class StatisticResult(TypedDict):
@@ -36,6 +37,7 @@ class StatisticMixIn(TypedDict, total=False):
     min: float
     max: float
     mean: float
+    mean_weight: float
 
 
 class StatisticData(StatisticDataBase, StatisticMixIn, total=False):
@@ -50,10 +52,20 @@ class StatisticDataTimestamp(StatisticDataTimestampBase, StatisticMixIn, total=False):
     last_reset_ts: float | None
 
 
+class StatisticMeanType(IntEnum):
+    """Statistic mean type."""
+
+    NONE = 0
+    ARITHMETIC = 1
+    CIRCULAR = 2
+
+
 class StatisticMetaData(TypedDict):
     """Statistic meta data class."""
 
-    has_mean: bool
+    # has_mean is deprecated, use mean_type instead. has_mean will be removed in 2026.4
+    has_mean: NotRequired[bool]
+    mean_type: StatisticMeanType
     has_sum: bool
     name: str | None
     source: str
@@ -9,12 +9,23 @@ from datetime import datetime, timedelta
 from functools import lru_cache, partial
 from itertools import chain, groupby
 import logging
+import math
 from operator import itemgetter
 import re
 from time import time as time_time
-from typing import TYPE_CHECKING, Any, Literal, TypedDict, cast
+from typing import TYPE_CHECKING, Any, Literal, Required, TypedDict, cast
 
-from sqlalchemy import Select, and_, bindparam, func, lambda_stmt, select, text
+from sqlalchemy import (
+    Label,
+    Select,
+    and_,
+    bindparam,
+    case,
+    func,
+    lambda_stmt,
+    select,
+    text,
+)
 from sqlalchemy.engine.row import Row
 from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy.orm.session import Session
@@ -29,6 +40,7 @@ from homeassistant.helpers.singleton import singleton
 from homeassistant.helpers.typing import UNDEFINED, UndefinedType
 from homeassistant.util import dt as dt_util
 from homeassistant.util.collection import chunked_or_all
+from homeassistant.util.enum import try_parse_enum
 from homeassistant.util.unit_conversion import (
     AreaConverter,
     BaseUnitConverter,
@@ -74,6 +86,7 @@ from .db_schema import (
 from .models import (
     StatisticData,
     StatisticDataTimestamp,
+    StatisticMeanType,
     StatisticMetaData,
     StatisticResult,
     datetime_to_timestamp_or_none,
@@ -113,11 +126,54 @@ QUERY_STATISTICS_SHORT_TERM = (
     StatisticsShortTerm.sum,
 )
 
 
+def query_circular_mean(table: type[StatisticsBase]) -> tuple[Label, Label]:
+    """Return the sqlalchemy function for circular mean and the mean_weight.
+
+    The result must be taken modulo 360 to normalize it to [0, 360).
+    """
+    # Postgres doesn't support modulo for double precision and
+    # the other dbs return the remainder instead of the modulo,
+    # meaning negative values are possible. For these reasons
+    # we need to normalize the result to be in the range [0, 360)
+    # in Python.
+    # https://en.wikipedia.org/wiki/Circular_mean
+    radians = func.radians(table.mean)
+    weight = func.sqrt(
+        func.power(func.sum(func.sin(radians) * table.mean_weight), 2)
+        + func.power(func.sum(func.cos(radians) * table.mean_weight), 2)
+    )
+    return (
+        func.degrees(
+            func.atan2(func.sum(func.sin(radians)), func.sum(func.cos(radians)))
+        ).label("mean"),
+        weight.label("mean_weight"),
+    )
+
+
 QUERY_STATISTICS_SUMMARY_MEAN = (
     StatisticsShortTerm.metadata_id,
-    func.avg(StatisticsShortTerm.mean),
     func.min(StatisticsShortTerm.min),
     func.max(StatisticsShortTerm.max),
+    case(
+        (
+            StatisticsMeta.mean_type == StatisticMeanType.ARITHMETIC,
+            func.avg(StatisticsShortTerm.mean),
+        ),
+        (
+            StatisticsMeta.mean_type == StatisticMeanType.CIRCULAR,
+            query_circular_mean(StatisticsShortTerm)[0],
+        ),
+        else_=None,
+    ),
+    case(
+        (
+            StatisticsMeta.mean_type == StatisticMeanType.CIRCULAR,
+            query_circular_mean(StatisticsShortTerm)[1],
+        ),
+        else_=None,
+    ),
+    StatisticsMeta.mean_type,
 )
 
 QUERY_STATISTICS_SUMMARY_SUM = (
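The second label matters: mean_weight is the length of the summed sin/cos vector, and carrying it alongside each stored circular mean lets longer periods be combined from sub-period summaries without rereading the raw rows. A small standalone check of that property (sample angles are illustrative):

    import math

    def _vector_sum(pairs):  # pairs of (degrees, weight)
        sin_sum = sum(math.sin(math.radians(d)) * w for d, w in pairs)
        cos_sum = sum(math.cos(math.radians(d)) * w for d, w in pairs)
        return sin_sum, cos_sum

    angles = [10.0, 20.0, 350.0, 340.0, 30.0]

    # Circular mean over all raw samples at once.
    s_all, c_all = _vector_sum((a, 1.0) for a in angles)
    direct = math.degrees(math.atan2(s_all, c_all)) % 360

    # Split into two "hours"; keep each group's mean plus its resultant
    # vector length as mean_weight, then combine only the summaries.
    summaries = []
    for group in (angles[:2], angles[2:]):
        s, c = _vector_sum((a, 1.0) for a in group)
        summaries.append((math.degrees(math.atan2(s, c)) % 360, math.hypot(s, c)))
    s_sum, c_sum = _vector_sum(summaries)
    combined = math.degrees(math.atan2(s_sum, c_sum)) % 360

    assert math.isclose(direct, combined)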
@@ -180,6 +236,24 @@ def mean(values: list[float]) -> float | None:
     return sum(values) / len(values)
 
 
+DEG_TO_RAD = math.pi / 180
+RAD_TO_DEG = 180 / math.pi
+
+
+def weighted_circular_mean(values: Iterable[tuple[float, float]]) -> float:
+    """Return the weighted circular mean of the values."""
+    sin_sum = sum(math.sin(x * DEG_TO_RAD) * weight for x, weight in values)
+    cos_sum = sum(math.cos(x * DEG_TO_RAD) * weight for x, weight in values)
+    return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360
+
+
+def circular_mean(values: list[float]) -> float:
+    """Return the circular mean of the values."""
+    sin_sum = sum(math.sin(x * DEG_TO_RAD) for x in values)
+    cos_sum = sum(math.cos(x * DEG_TO_RAD) for x in values)
+    return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360
+
+
 _LOGGER = logging.getLogger(__name__)
 
 
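A few doctest-style checks of the helpers above, using the module's own mean(), circular_mean(), and weighted_circular_mean():

    # Headings straddling north: the arithmetic mean points south,
    # the circular mean wraps correctly.
    assert mean([350.0, 10.0]) == 180.0
    assert round(circular_mean([350.0, 10.0])) % 360 == 0

    assert round(circular_mean([90.0, 180.0])) == 135

    # Equal weights behave like the unweighted mean.
    assert round(weighted_circular_mean([(0.0, 1.0), (90.0, 1.0)])) == 45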
@@ -372,11 +446,19 @@ def _compile_hourly_statistics_summary_mean_stmt(
     start_time_ts: float, end_time_ts: float
 ) -> StatementLambdaElement:
     """Generate the summary mean statement for hourly statistics."""
+    # Due to the fact that we support different mean types (see StatisticMeanType)
+    # we need to join here with the StatisticsMeta table to get the mean type
+    # and then use a case statement to compute the mean based on the mean type.
+    # As we use StatisticsMeta.mean_type in the select case statement, we need
+    # to group by it as well.
     return lambda_stmt(
         lambda: select(*QUERY_STATISTICS_SUMMARY_MEAN)
         .filter(StatisticsShortTerm.start_ts >= start_time_ts)
         .filter(StatisticsShortTerm.start_ts < end_time_ts)
-        .group_by(StatisticsShortTerm.metadata_id)
+        .join(
+            StatisticsMeta, and_(StatisticsShortTerm.metadata_id == StatisticsMeta.id)
+        )
+        .group_by(StatisticsShortTerm.metadata_id, StatisticsMeta.mean_type)
         .order_by(StatisticsShortTerm.metadata_id)
     )
 
@@ -418,10 +500,17 @@ def _compile_hourly_statistics(session: Session, start: datetime) -> None:
 
     if stats:
         for stat in stats:
-            metadata_id, _mean, _min, _max = stat
+            metadata_id, _min, _max, _mean, _mean_weight, _mean_type = stat
+            if (
+                try_parse_enum(StatisticMeanType, _mean_type)
+                is StatisticMeanType.CIRCULAR
+            ):
+                # Normalize the circular mean to be in the range [0, 360)
+                _mean = _mean % 360
             summary[metadata_id] = {
                 "start_ts": start_time_ts,
                 "mean": _mean,
+                "mean_weight": _mean_weight,
                 "min": _min,
                 "max": _max,
             }
@@ -827,7 +916,7 @@ def _statistic_by_id_from_metadata(
             "display_unit_of_measurement": get_display_unit(
                 hass, meta["statistic_id"], meta["unit_of_measurement"]
             ),
-            "has_mean": meta["has_mean"],
+            "mean_type": meta["mean_type"],
             "has_sum": meta["has_sum"],
             "name": meta["name"],
             "source": meta["source"],
@@ -846,7 +935,9 @@ def _flatten_list_statistic_ids_metadata_result(
         {
             "statistic_id": _id,
             "display_unit_of_measurement": info["display_unit_of_measurement"],
-            "has_mean": info["has_mean"],
+            "has_mean": info["mean_type"]
+            == StatisticMeanType.ARITHMETIC,  # Can be removed with 2026.4
+            "mean_type": info["mean_type"],
             "has_sum": info["has_sum"],
             "name": info.get("name"),
             "source": info["source"],
@@ -901,7 +992,7 @@ def list_statistic_ids(
             continue
         result[key] = {
             "display_unit_of_measurement": meta["unit_of_measurement"],
-            "has_mean": meta["has_mean"],
+            "mean_type": meta["mean_type"],
             "has_sum": meta["has_sum"],
             "name": meta["name"],
             "source": meta["source"],
@@ -919,6 +1010,7 @@ def _reduce_statistics(
     period_start_end: Callable[[float], tuple[float, float]],
     period: timedelta,
     types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
+    metadata: dict[str, tuple[int, StatisticMetaData]],
 ) -> dict[str, list[StatisticsRow]]:
     """Reduce hourly statistics to daily or monthly statistics."""
     result: dict[str, list[StatisticsRow]] = defaultdict(list)
@@ -946,7 +1038,13 @@ def _reduce_statistics(
                     "end": end,
                 }
                 if _want_mean:
-                    row["mean"] = mean(mean_values) if mean_values else None
+                    row["mean"] = None
+                    if mean_values:
+                        match metadata[statistic_id][1]["mean_type"]:
+                            case StatisticMeanType.ARITHMETIC:
+                                row["mean"] = mean(mean_values)
+                            case StatisticMeanType.CIRCULAR:
+                                row["mean"] = circular_mean(mean_values)
                     mean_values.clear()
                 if _want_min:
                     row["min"] = min(min_values) if min_values else None
@@ -963,8 +1061,9 @@ def _reduce_statistics(
                 result[statistic_id].append(row)
             if _want_max and (_max := statistic.get("max")) is not None:
                 max_values.append(_max)
-            if _want_mean and (_mean := statistic.get("mean")) is not None:
-                mean_values.append(_mean)
+            if _want_mean:
+                if (_mean := statistic.get("mean")) is not None:
+                    mean_values.append(_mean)
             if _want_min and (_min := statistic.get("min")) is not None:
                 min_values.append(_min)
             prev_stat = statistic
@@ -1011,11 +1110,12 @@ def reduce_day_ts_factory() -> tuple[
 def _reduce_statistics_per_day(
     stats: dict[str, list[StatisticsRow]],
     types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
+    metadata: dict[str, tuple[int, StatisticMetaData]],
 ) -> dict[str, list[StatisticsRow]]:
     """Reduce hourly statistics to daily statistics."""
     _same_day_ts, _day_start_end_ts = reduce_day_ts_factory()
     return _reduce_statistics(
-        stats, _same_day_ts, _day_start_end_ts, timedelta(days=1), types
+        stats, _same_day_ts, _day_start_end_ts, timedelta(days=1), types, metadata
     )
 
 
@@ -1059,11 +1159,12 @@ def reduce_week_ts_factory() -> tuple[
 def _reduce_statistics_per_week(
     stats: dict[str, list[StatisticsRow]],
     types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
+    metadata: dict[str, tuple[int, StatisticMetaData]],
 ) -> dict[str, list[StatisticsRow]]:
     """Reduce hourly statistics to weekly statistics."""
     _same_week_ts, _week_start_end_ts = reduce_week_ts_factory()
     return _reduce_statistics(
-        stats, _same_week_ts, _week_start_end_ts, timedelta(days=7), types
+        stats, _same_week_ts, _week_start_end_ts, timedelta(days=7), types, metadata
     )
 
 
@@ -1112,11 +1213,12 @@ def reduce_month_ts_factory() -> tuple[
 def _reduce_statistics_per_month(
     stats: dict[str, list[StatisticsRow]],
     types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
+    metadata: dict[str, tuple[int, StatisticMetaData]],
 ) -> dict[str, list[StatisticsRow]]:
     """Reduce hourly statistics to monthly statistics."""
     _same_month_ts, _month_start_end_ts = reduce_month_ts_factory()
     return _reduce_statistics(
-        stats, _same_month_ts, _month_start_end_ts, timedelta(days=31), types
+        stats, _same_month_ts, _month_start_end_ts, timedelta(days=31), types, metadata
     )
 
 
@@ -1160,27 +1262,41 @@ def _generate_max_mean_min_statistic_in_sub_period_stmt(
     return stmt
 
 
+class _MaxMinMeanStatisticSubPeriod(TypedDict, total=False):
+    max: float
+    mean_acc: float
+    min: float
+    duration: float
+    circular_means: Required[list[tuple[float, float]]]
+
+
 def _get_max_mean_min_statistic_in_sub_period(
     session: Session,
-    result: dict[str, float],
+    result: _MaxMinMeanStatisticSubPeriod,
     start_time: datetime | None,
     end_time: datetime | None,
     table: type[StatisticsBase],
     types: set[Literal["max", "mean", "min", "change"]],
-    metadata_id: int,
+    metadata: tuple[int, StatisticMetaData],
 ) -> None:
     """Return max, mean and min during the period."""
     # Calculate max, mean, min
+    mean_type = metadata[1]["mean_type"]
     columns = select()
     if "max" in types:
         columns = columns.add_columns(func.max(table.max))
     if "mean" in types:
-        columns = columns.add_columns(func.avg(table.mean))
-        columns = columns.add_columns(func.count(table.mean))
+        match mean_type:
+            case StatisticMeanType.ARITHMETIC:
+                columns = columns.add_columns(func.avg(table.mean))
+                columns = columns.add_columns(func.count(table.mean))
+            case StatisticMeanType.CIRCULAR:
+                columns = columns.add_columns(*query_circular_mean(table))
     if "min" in types:
         columns = columns.add_columns(func.min(table.min))
 
     stmt = _generate_max_mean_min_statistic_in_sub_period_stmt(
-        columns, start_time, end_time, table, metadata_id
+        columns, start_time, end_time, table, metadata[0]
     )
     stats = cast(Sequence[Row[Any]], execute_stmt_lambda_element(session, stmt))
     if not stats:
@@ -1188,11 +1304,21 @@ def _get_max_mean_min_statistic_in_sub_period(
     if "max" in types and (new_max := stats[0].max) is not None:
         old_max = result.get("max")
         result["max"] = max(new_max, old_max) if old_max is not None else new_max
-    if "mean" in types and stats[0].avg is not None:
+    if "mean" in types:
         # https://github.com/sqlalchemy/sqlalchemy/issues/9127
-        duration = stats[0].count * table.duration.total_seconds()  # type: ignore[operator]
-        result["duration"] = result.get("duration", 0.0) + duration
-        result["mean_acc"] = result.get("mean_acc", 0.0) + stats[0].avg * duration
+        match mean_type:
+            case StatisticMeanType.ARITHMETIC:
+                duration = stats[0].count * table.duration.total_seconds()  # type: ignore[operator]
+                if stats[0].avg is not None:
+                    result["duration"] = result.get("duration", 0.0) + duration
+                    result["mean_acc"] = (
+                        result.get("mean_acc", 0.0) + stats[0].avg * duration
+                    )
+            case StatisticMeanType.CIRCULAR:
+                if (new_circular_mean := stats[0].mean) is not None and (
+                    weight := stats[0].mean_weight
+                ) is not None:
+                    result["circular_means"].append((new_circular_mean, weight))
     if "min" in types and (new_min := stats[0].min) is not None:
         old_min = result.get("min")
         result["min"] = min(new_min, old_min) if old_min is not None else new_min
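To make the two accumulation paths concrete: the arithmetic branch keeps running mean_acc/duration sums, while the circular branch only collects (mean, weight) pairs per sub-period and defers the combination to a single weighted_circular_mean call in _get_max_mean_min_statistic below. An illustrative final combine (numbers are made up):

    circular_means = [(15.0, 1.99), (359.7, 2.79)]  # e.g. head + main sub-periods
    final_mean = weighted_circular_mean(circular_means)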
@@ -1207,15 +1333,15 @@ def _get_max_mean_min_statistic(
     tail_start_time: datetime | None,
     tail_end_time: datetime | None,
     tail_only: bool,
-    metadata_id: int,
+    metadata: tuple[int, StatisticMetaData],
     types: set[Literal["max", "mean", "min", "change"]],
 ) -> dict[str, float | None]:
     """Return max, mean and min during the period.
 
-    The mean is a time weighted average, combining hourly and 5-minute statistics if
+    The mean is time weighted, combining hourly and 5-minute statistics if
     necessary.
     """
-    max_mean_min: dict[str, float] = {}
+    max_mean_min = _MaxMinMeanStatisticSubPeriod(circular_means=[])
     result: dict[str, float | None] = {}
 
     if tail_start_time is not None:
@@ -1227,7 +1353,7 @@ def _get_max_mean_min_statistic(
             tail_end_time,
             StatisticsShortTerm,
             types,
-            metadata_id,
+            metadata,
         )
 
     if not tail_only:
@@ -1238,7 +1364,7 @@ def _get_max_mean_min_statistic(
             main_end_time,
             Statistics,
             types,
-            metadata_id,
+            metadata,
         )
 
     if head_start_time is not None:
@@ -1249,16 +1375,23 @@ def _get_max_mean_min_statistic(
             head_end_time,
             StatisticsShortTerm,
             types,
-            metadata_id,
+            metadata,
         )
 
     if "max" in types:
         result["max"] = max_mean_min.get("max")
     if "mean" in types:
-        if "mean_acc" not in max_mean_min:
-            result["mean"] = None
-        else:
-            result["mean"] = max_mean_min["mean_acc"] / max_mean_min["duration"]
+        mean_value = None
+        match metadata[1]["mean_type"]:
+            case StatisticMeanType.CIRCULAR:
+                if circular_means := max_mean_min["circular_means"]:
+                    mean_value = weighted_circular_mean(circular_means)
+            case StatisticMeanType.ARITHMETIC:
+                if (mean_value := max_mean_min.get("mean_acc")) is not None and (
+                    duration := max_mean_min.get("duration")
+                ) is not None:
+                    mean_value = mean_value / duration
+        result["mean"] = mean_value
     if "min" in types:
         result["min"] = max_mean_min.get("min")
     return result
@@ -1559,7 +1692,7 @@ def statistic_during_period(
             tail_start_time,
             tail_end_time,
             tail_only,
-            metadata_id,
+            metadata,
             types,
         )
 
@@ -1642,7 +1775,7 @@ def _extract_metadata_and_discard_impossible_columns(
     has_sum = False
     for metadata_id, stats_metadata in metadata.values():
         metadata_ids.append(metadata_id)
-        has_mean |= stats_metadata["has_mean"]
+        has_mean |= stats_metadata["mean_type"] is not StatisticMeanType.NONE
        has_sum |= stats_metadata["has_sum"]
     if not has_mean:
         types.discard("mean")
@@ -1798,13 +1931,13 @@ def _statistics_during_period_with_session(
         )
 
     if period == "day":
-        result = _reduce_statistics_per_day(result, types)
+        result = _reduce_statistics_per_day(result, types, metadata)
 
     if period == "week":
-        result = _reduce_statistics_per_week(result, types)
+        result = _reduce_statistics_per_week(result, types, metadata)
 
     if period == "month":
-        result = _reduce_statistics_per_month(result, types)
+        result = _reduce_statistics_per_month(result, types, metadata)
 
     if "change" in _types:
         _augment_result_with_change(
@@ -4,16 +4,18 @@ from __future__ import annotations
 
 import logging
 import threading
-from typing import TYPE_CHECKING, Final, Literal
+from typing import TYPE_CHECKING, Any, Final, Literal
 
 from lru import LRU
 from sqlalchemy import lambda_stmt, select
+from sqlalchemy.orm import InstrumentedAttribute
 from sqlalchemy.orm.session import Session
 from sqlalchemy.sql.expression import true
 from sqlalchemy.sql.lambdas import StatementLambdaElement
 
+from ..const import CIRCULAR_MEAN_SCHEMA_VERSION
 from ..db_schema import StatisticsMeta
-from ..models import StatisticMetaData
+from ..models import StatisticMeanType, StatisticMetaData
 from ..util import execute_stmt_lambda_element
 
 if TYPE_CHECKING:
@@ -28,7 +30,6 @@ QUERY_STATISTIC_META = (
     StatisticsMeta.statistic_id,
     StatisticsMeta.source,
     StatisticsMeta.unit_of_measurement,
-    StatisticsMeta.has_mean,
     StatisticsMeta.has_sum,
     StatisticsMeta.name,
 )
@@ -37,24 +38,38 @@ INDEX_ID: Final = 0
 INDEX_STATISTIC_ID: Final = 1
 INDEX_SOURCE: Final = 2
 INDEX_UNIT_OF_MEASUREMENT: Final = 3
-INDEX_HAS_MEAN: Final = 4
-INDEX_HAS_SUM: Final = 5
-INDEX_NAME: Final = 6
+INDEX_HAS_SUM: Final = 4
+INDEX_NAME: Final = 5
+INDEX_MEAN_TYPE: Final = 6
 
 
 def _generate_get_metadata_stmt(
     statistic_ids: set[str] | None = None,
     statistic_type: Literal["mean", "sum"] | None = None,
     statistic_source: str | None = None,
+    schema_version: int = 0,
 ) -> StatementLambdaElement:
-    """Generate a statement to fetch metadata."""
-    stmt = lambda_stmt(lambda: select(*QUERY_STATISTIC_META))
+    """Generate a statement to fetch metadata with the passed filters.
+
+    Depending on the schema version, either the mean_type column (added in
+    version 49) or the legacy has_mean column is used.
+    """
+    columns: list[InstrumentedAttribute[Any]] = list(QUERY_STATISTIC_META)
+    if schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION:
+        columns.append(StatisticsMeta.mean_type)
+    else:
+        columns.append(StatisticsMeta.has_mean)
+    stmt = lambda_stmt(lambda: select(*columns))
     if statistic_ids:
         stmt += lambda q: q.where(StatisticsMeta.statistic_id.in_(statistic_ids))
     if statistic_source is not None:
         stmt += lambda q: q.where(StatisticsMeta.source == statistic_source)
     if statistic_type == "mean":
-        stmt += lambda q: q.where(StatisticsMeta.has_mean == true())
+        if schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION:
+            stmt += lambda q: q.where(
+                StatisticsMeta.mean_type != StatisticMeanType.NONE
+            )
+        else:
+            stmt += lambda q: q.where(StatisticsMeta.has_mean == true())
     elif statistic_type == "sum":
         stmt += lambda q: q.where(StatisticsMeta.has_sum == true())
     return stmt
@ -100,14 +115,34 @@ class StatisticsMetaManager:
|
|||||||
for row in execute_stmt_lambda_element(
|
for row in execute_stmt_lambda_element(
|
||||||
session,
|
session,
|
||||||
_generate_get_metadata_stmt(
|
_generate_get_metadata_stmt(
|
||||||
statistic_ids, statistic_type, statistic_source
|
statistic_ids,
|
||||||
|
statistic_type,
|
||||||
|
statistic_source,
|
||||||
|
self.recorder.schema_version,
|
||||||
),
|
),
|
||||||
orm_rows=False,
|
orm_rows=False,
|
||||||
):
|
):
|
||||||
statistic_id = row[INDEX_STATISTIC_ID]
|
statistic_id = row[INDEX_STATISTIC_ID]
|
||||||
row_id = row[INDEX_ID]
|
row_id = row[INDEX_ID]
|
||||||
|
if self.recorder.schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION:
|
||||||
|
try:
|
||||||
|
mean_type = StatisticMeanType(row[INDEX_MEAN_TYPE])
|
||||||
|
except ValueError:
|
||||||
|
_LOGGER.warning(
|
||||||
|
"Invalid mean type found for statistic_id: %s, mean_type: %s. Skipping",
|
||||||
|
statistic_id,
|
||||||
|
row[INDEX_MEAN_TYPE],
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
mean_type = (
|
||||||
|
StatisticMeanType.ARITHMETIC
|
||||||
|
if row[INDEX_MEAN_TYPE]
|
||||||
|
else StatisticMeanType.NONE
|
||||||
|
)
|
||||||
meta = {
|
meta = {
|
||||||
"has_mean": row[INDEX_HAS_MEAN],
|
"has_mean": mean_type is StatisticMeanType.ARITHMETIC,
|
||||||
|
"mean_type": mean_type,
|
||||||
"has_sum": row[INDEX_HAS_SUM],
|
"has_sum": row[INDEX_HAS_SUM],
|
||||||
"name": row[INDEX_NAME],
|
"name": row[INDEX_NAME],
|
||||||
"source": row[INDEX_SOURCE],
|
"source": row[INDEX_SOURCE],
|
||||||
@ -157,9 +192,18 @@ class StatisticsMetaManager:
|
|||||||
This call is not thread-safe and must be called from the
|
This call is not thread-safe and must be called from the
|
||||||
recorder thread.
|
recorder thread.
|
||||||
"""
|
"""
|
||||||
|
if "mean_type" not in new_metadata:
|
||||||
|
# To maintain backward compatibility after adding 'mean_type' in schema version 49,
|
||||||
|
# we must still check for its presence. Even though type hints suggest it should always exist,
|
||||||
|
# custom integrations might omit it, so we need to guard against that.
|
||||||
|
new_metadata["mean_type"] = ( # type: ignore[unreachable]
|
||||||
|
StatisticMeanType.ARITHMETIC
|
||||||
|
if new_metadata["has_mean"]
|
||||||
|
else StatisticMeanType.NONE
|
||||||
|
)
|
||||||
metadata_id, old_metadata = old_metadata_dict[statistic_id]
|
metadata_id, old_metadata = old_metadata_dict[statistic_id]
|
||||||
if not (
|
if not (
|
||||||
old_metadata["has_mean"] != new_metadata["has_mean"]
|
old_metadata["mean_type"] != new_metadata["mean_type"]
|
||||||
or old_metadata["has_sum"] != new_metadata["has_sum"]
|
or old_metadata["has_sum"] != new_metadata["has_sum"]
|
||||||
or old_metadata["name"] != new_metadata["name"]
|
or old_metadata["name"] != new_metadata["name"]
|
||||||
or old_metadata["unit_of_measurement"]
|
or old_metadata["unit_of_measurement"]
|
||||||
@ -170,7 +214,7 @@ class StatisticsMetaManager:
|
|||||||
self._assert_in_recorder_thread()
|
self._assert_in_recorder_thread()
|
||||||
session.query(StatisticsMeta).filter_by(statistic_id=statistic_id).update(
|
session.query(StatisticsMeta).filter_by(statistic_id=statistic_id).update(
|
||||||
{
|
{
|
||||||
StatisticsMeta.has_mean: new_metadata["has_mean"],
|
StatisticsMeta.mean_type: new_metadata["mean_type"],
|
||||||
StatisticsMeta.has_sum: new_metadata["has_sum"],
|
StatisticsMeta.has_sum: new_metadata["has_sum"],
|
||||||
StatisticsMeta.name: new_metadata["name"],
|
StatisticsMeta.name: new_metadata["name"],
|
||||||
StatisticsMeta.unit_of_measurement: new_metadata["unit_of_measurement"],
|
StatisticsMeta.unit_of_measurement: new_metadata["unit_of_measurement"],
|
||||||
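
The backward-compatibility rule applied throughout this file is that the legacy has_mean boolean maps onto the new mean_type enum: True becomes ARITHMETIC, False becomes NONE, and CIRCULAR is only ever set explicitly. A minimal sketch of that mapping, assuming the integer values implied by the test snapshots further down (ARITHMETIC == 1, CIRCULAR == 2; NONE == 0 is an assumption, not shown in this commit):

from enum import IntEnum


class StatisticMeanType(IntEnum):
    """Illustrative stand-in for recorder.models.StatisticMeanType."""

    NONE = 0  # assumed value; not shown in this commit
    ARITHMETIC = 1  # matches 'metadata_mean_type': 1 in the test snapshots
    CIRCULAR = 2  # matches 'state_mean_type': 2 in the test snapshots


def mean_type_from_has_mean(has_mean: bool) -> StatisticMeanType:
    """Upgrade legacy metadata: has_mean=True was always an arithmetic mean."""
    return StatisticMeanType.ARITHMETIC if has_mean else StatisticMeanType.NONE


def has_mean_from_mean_type(mean_type: StatisticMeanType) -> bool:
    """Downgrade for old consumers: only the arithmetic mean maps back to True."""
    return mean_type is StatisticMeanType.ARITHMETIC
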
@@ -37,7 +37,7 @@ from homeassistant.util.unit_conversion import (
     VolumeFlowRateConverter,
 )

-from .models import StatisticPeriod
+from .models import StatisticMeanType, StatisticPeriod
 from .statistics import (
     STATISTIC_UNIT_TO_UNIT_CONVERTER,
     async_add_external_statistics,
@@ -532,6 +532,10 @@ def ws_import_statistics(
 ) -> None:
     """Import statistics."""
     metadata = msg["metadata"]
+    # The WS command will be changed in a follow up PR
+    metadata["mean_type"] = (
+        StatisticMeanType.ARITHMETIC if metadata["has_mean"] else StatisticMeanType.NONE
+    )
     stats = msg["stats"]

     if valid_entity_id(metadata["statistic_id"]):
@@ -491,6 +491,9 @@ class SensorStateClass(StrEnum):
     MEASUREMENT = "measurement"
     """The state represents a measurement in present time."""

+    MEASUREMENT_ANGLE = "measurement_angle"
+    """The state represents an angle measurement in present time. Currently only degrees are supported."""
+
     TOTAL = "total"
     """The state represents a total amount.

@@ -693,6 +696,6 @@ DEVICE_CLASS_STATE_CLASSES: dict[SensorDeviceClass, set[SensorStateClass]] = {
         SensorStateClass.TOTAL,
         SensorStateClass.TOTAL_INCREASING,
     },
-    SensorDeviceClass.WIND_DIRECTION: set(),
+    SensorDeviceClass.WIND_DIRECTION: {SensorStateClass.MEASUREMENT_ANGLE},
     SensorDeviceClass.WIND_SPEED: {SensorStateClass.MEASUREMENT},
 }
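
With MEASUREMENT_ANGLE wired to SensorDeviceClass.WIND_DIRECTION, an integration opts in through the usual entity attributes. A minimal sketch — the entity class name and value below are invented, but the device class, state class, and degree unit combination mirrors the wind-direction fixtures in the tests later in this commit:

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorStateClass,
)
from homeassistant.const import DEGREE


class ExampleWindDirectionSensor(SensorEntity):
    """Hypothetical wind direction sensor recorded with circular-mean statistics."""

    _attr_device_class = SensorDeviceClass.WIND_DIRECTION
    _attr_state_class = SensorStateClass.MEASUREMENT_ANGLE
    _attr_native_unit_of_measurement = DEGREE  # only degrees are supported

    @property
    def native_value(self) -> float:
        return 350.0  # bearing in degrees
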
@@ -5,6 +5,7 @@ from __future__ import annotations
 from collections import defaultdict
 from collections.abc import Callable, Iterable
 from contextlib import suppress
+from dataclasses import dataclass
 import datetime
 import itertools
 import logging
@@ -21,6 +22,7 @@ from homeassistant.components.recorder import (
 )
 from homeassistant.components.recorder.models import (
     StatisticData,
+    StatisticMeanType,
     StatisticMetaData,
     StatisticResult,
 )
@@ -52,10 +54,22 @@ from .const import (

 _LOGGER = logging.getLogger(__name__)


+@dataclass
+class _StatisticsConfig:
+    types: set[str]
+    mean_type: StatisticMeanType = StatisticMeanType.NONE
+
+
 DEFAULT_STATISTICS = {
-    SensorStateClass.MEASUREMENT: {"mean", "min", "max"},
-    SensorStateClass.TOTAL: {"sum"},
-    SensorStateClass.TOTAL_INCREASING: {"sum"},
+    SensorStateClass.MEASUREMENT: _StatisticsConfig(
+        {"mean", "min", "max"}, StatisticMeanType.ARITHMETIC
+    ),
+    SensorStateClass.MEASUREMENT_ANGLE: _StatisticsConfig(
+        {"mean"}, StatisticMeanType.CIRCULAR
+    ),
+    SensorStateClass.TOTAL: _StatisticsConfig({"sum"}),
+    SensorStateClass.TOTAL_INCREASING: _StatisticsConfig({"sum"}),
 }

 EQUIVALENT_UNITS = {
@@ -76,10 +90,15 @@ WARN_NEGATIVE: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_total_increasing_nega
 # Keep track of entities for which a warning about unsupported unit has been logged
 WARN_UNSUPPORTED_UNIT: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_unsupported_unit")
 WARN_UNSTABLE_UNIT: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_unstable_unit")
+# Keep track of entities for which a warning about statistics mean algorithm change has been logged
+WARN_STATISTICS_MEAN_CHANGED: HassKey[set[str]] = HassKey(
+    f"{DOMAIN}_warn_statistics_mean_change"
+)
 # Link to dev statistics where issues around LTS can be fixed
 LINK_DEV_STATISTICS = "https://my.home-assistant.io/redirect/developer_statistics"
 STATE_CLASS_REMOVED_ISSUE = "state_class_removed"
 UNITS_CHANGED_ISSUE = "units_changed"
+MEAN_TYPE_CHANGED_ISSUE = "mean_type_changed"


 def _get_sensor_states(hass: HomeAssistant) -> list[State]:
@@ -101,7 +120,7 @@ def _get_sensor_states(hass: HomeAssistant) -> list[State]:
     ]


-def _time_weighted_average(
+def _time_weighted_arithmetic_mean(
     fstates: list[tuple[float, State]], start: datetime.datetime, end: datetime.datetime
 ) -> float:
     """Calculate a time weighted average.
@@ -139,6 +158,43 @@ def _time_weighted_average(
     return accumulated / (end - start).total_seconds()


+def _time_weighted_circular_mean(
+    fstates: list[tuple[float, State]], start: datetime.datetime, end: datetime.datetime
+) -> float:
+    """Calculate a time weighted circular mean.
+
+    The circular mean is calculated by weighting the states by duration in seconds between
+    state changes.
+    Note: there's no interpolation of values between state changes.
+    """
+    old_fstate: float | None = None
+    old_start_time: datetime.datetime | None = None
+    values: list[tuple[float, float]] = []
+
+    for fstate, state in fstates:
+        # The recorder will give us the last known state, which may be well
+        # before the requested start time for the statistics
+        start_time = max(state.last_updated, start)
+        if old_start_time is None:
+            # Adjust start time, if there was no last known state
+            start = start_time
+        else:
+            duration = (start_time - old_start_time).total_seconds()
+            assert old_fstate is not None
+            values.append((old_fstate, duration))
+
+        old_fstate = fstate
+        old_start_time = start_time
+
+    if old_fstate is not None:
+        # Add last value weighted by duration until end of the period
+        assert old_start_time is not None
+        duration = (end - old_start_time).total_seconds()
+        values.append((old_fstate, duration))
+
+    return statistics.weighted_circular_mean(values)
+
+
 def _get_units(fstates: list[tuple[float, State]]) -> set[str | None]:
     """Return a set of all units."""
     return {item[1].attributes.get(ATTR_UNIT_OF_MEASUREMENT) for item in fstates}
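
_time_weighted_circular_mean collects (angle, duration) pairs and delegates to a weighted circular mean, which averages angles as duration-scaled unit vectors so that values on either side of the 0°/360° boundary do not cancel to a meaningless midpoint. The following is a self-contained sketch of that computation for illustration, not the helper called above; the sample durations (75 s at 10°, 150 s at 350°, 70 s at 5°) are those implied by the record_states fixture and the expected values asserted later in this commit:

import math


def weighted_circular_mean(values: list[tuple[float, float]]) -> float:
    """Duration-weighted circular mean of (angle_deg, weight) pairs."""
    # Sum each angle's unit vector, scaled by its weight.
    x = sum(weight * math.cos(math.radians(angle)) for angle, weight in values)
    y = sum(weight * math.sin(math.radians(angle)) for angle, weight in values)
    # atan2 recovers the direction of the resultant vector; normalize into [0, 360).
    return math.degrees(math.atan2(y, x)) % 360


# Wind direction held at 10° for 75 s, 350° for 150 s, then 5° for 70 s:
print(weighted_circular_mean([(10, 75), (350, 150), (5, 70)]))
# ≈ 358.6387003873801, the value the sensor recorder tests assert below;
# a duration-weighted arithmetic mean of the same data would give ≈ 181.7°.
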
@@ -364,7 +420,7 @@ def reset_detected(
     return fstate < 0.9 * previous_fstate


-def _wanted_statistics(sensor_states: list[State]) -> dict[str, set[str]]:
+def _wanted_statistics(sensor_states: list[State]) -> dict[str, _StatisticsConfig]:
     """Prepare a dict with wanted statistics for entities."""
     return {
         state.entity_id: DEFAULT_STATISTICS[state.attributes[ATTR_STATE_CLASS]]
@@ -408,7 +464,9 @@ def compile_statistics(  # noqa: C901
     wanted_statistics = _wanted_statistics(sensor_states)
     # Get history between start and end
     entities_full_history = [
-        i.entity_id for i in sensor_states if "sum" in wanted_statistics[i.entity_id]
+        i.entity_id
+        for i in sensor_states
+        if "sum" in wanted_statistics[i.entity_id].types
     ]
     history_list: dict[str, list[State]] = {}
     if entities_full_history:
@@ -423,7 +481,7 @@ def compile_statistics(  # noqa: C901
     entities_significant_history = [
         i.entity_id
         for i in sensor_states
-        if "sum" not in wanted_statistics[i.entity_id]
+        if "sum" not in wanted_statistics[i.entity_id].types
     ]
     if entities_significant_history:
         _history_list = history.get_full_significant_states_with_session(
@@ -473,7 +531,7 @@ def compile_statistics(  # noqa: C901
             continue
         state_class: str = _state.attributes[ATTR_STATE_CLASS]
         to_process.append((entity_id, statistics_unit, state_class, valid_float_states))
-        if "sum" in wanted_statistics[entity_id]:
+        if "sum" in wanted_statistics[entity_id].types:
             to_query.add(entity_id)

     last_stats = statistics.get_latest_short_term_statistics_with_session(
@@ -485,6 +543,10 @@ def compile_statistics(  # noqa: C901
         state_class,
         valid_float_states,
     ) in to_process:
+        mean_type = StatisticMeanType.NONE
+        if "mean" in wanted_statistics[entity_id].types:
+            mean_type = wanted_statistics[entity_id].mean_type
+
         # Check metadata
         if old_metadata := old_metadatas.get(entity_id):
             if not _equivalent_units(
@@ -510,10 +572,34 @@ def compile_statistics(  # noqa: C901
                 )
                 continue

+            if (
+                mean_type is not StatisticMeanType.NONE
+                and (old_mean_type := old_metadata[1]["mean_type"])
+                is not StatisticMeanType.NONE
+                and mean_type != old_mean_type
+            ):
+                if WARN_STATISTICS_MEAN_CHANGED not in hass.data:
+                    hass.data[WARN_STATISTICS_MEAN_CHANGED] = set()
+                if entity_id not in hass.data[WARN_STATISTICS_MEAN_CHANGED]:
+                    hass.data[WARN_STATISTICS_MEAN_CHANGED].add(entity_id)
+                    _LOGGER.warning(
+                        (
+                            "The statistics mean algorithm for %s has changed from %s to %s."
+                            " Generation of long term statistics will be suppressed"
+                            " unless it changes back, or go to %s to delete the old"
+                            " statistics"
+                        ),
+                        entity_id,
+                        old_mean_type.name,
+                        mean_type.name,
+                        LINK_DEV_STATISTICS,
+                    )
+                continue
+
         # Set meta data
         meta: StatisticMetaData = {
-            "has_mean": "mean" in wanted_statistics[entity_id],
-            "has_sum": "sum" in wanted_statistics[entity_id],
+            "mean_type": mean_type,
+            "has_sum": "sum" in wanted_statistics[entity_id].types,
             "name": None,
             "source": RECORDER_DOMAIN,
             "statistic_id": entity_id,
@@ -522,19 +608,26 @@ def compile_statistics(  # noqa: C901

         # Make calculations
         stat: StatisticData = {"start": start}
-        if "max" in wanted_statistics[entity_id]:
+        if "max" in wanted_statistics[entity_id].types:
             stat["max"] = max(
                 *itertools.islice(zip(*valid_float_states, strict=False), 1)
             )
-        if "min" in wanted_statistics[entity_id]:
+        if "min" in wanted_statistics[entity_id].types:
             stat["min"] = min(
                 *itertools.islice(zip(*valid_float_states, strict=False), 1)
             )

-        if "mean" in wanted_statistics[entity_id]:
-            stat["mean"] = _time_weighted_average(valid_float_states, start, end)
+        match mean_type:
+            case StatisticMeanType.ARITHMETIC:
+                stat["mean"] = _time_weighted_arithmetic_mean(
+                    valid_float_states, start, end
+                )
+            case StatisticMeanType.CIRCULAR:
+                stat["mean"] = _time_weighted_circular_mean(
+                    valid_float_states, start, end
+                )

-        if "sum" in wanted_statistics[entity_id]:
+        if "sum" in wanted_statistics[entity_id].types:
             last_reset = old_last_reset = None
             new_state = old_state = None
             _sum = 0.0
@@ -658,18 +751,25 @@ def list_statistic_ids(
         attributes = state.attributes
         state_class = attributes[ATTR_STATE_CLASS]
         provided_statistics = DEFAULT_STATISTICS[state_class]
-        if statistic_type is not None and statistic_type not in provided_statistics:
+        if (
+            statistic_type is not None
+            and statistic_type not in provided_statistics.types
+        ):
             continue

         if (
-            (has_sum := "sum" in provided_statistics)
+            (has_sum := "sum" in provided_statistics.types)
             and ATTR_LAST_RESET not in attributes
             and state_class == SensorStateClass.MEASUREMENT
         ):
             continue

+        mean_type = StatisticMeanType.NONE
+        if "mean" in provided_statistics.types:
+            mean_type = provided_statistics.mean_type
+
         result[entity_id] = {
-            "has_mean": "mean" in provided_statistics,
+            "mean_type": mean_type,
             "has_sum": has_sum,
             "name": None,
             "source": RECORDER_DOMAIN,
@@ -734,6 +834,23 @@ def _update_issues(
             },
         )

+        if (
+            (metadata_mean_type := metadata[1]["mean_type"]) is not None
+            and state_class
+            and (state_mean_type := DEFAULT_STATISTICS[state_class].mean_type)
+            != metadata_mean_type
+        ):
+            # The mean type has changed and the old statistics are not valid anymore
+            report_issue(
+                MEAN_TYPE_CHANGED_ISSUE,
+                entity_id,
+                {
+                    "statistic_id": entity_id,
+                    "metadata_mean_type": metadata_mean_type,
+                    "state_mean_type": state_mean_type,
+                },
+            )


 def update_statistics_issues(
     hass: HomeAssistant,
@@ -756,7 +873,11 @@ def update_statistics_issues(
             issue.domain != DOMAIN
             or not (issue_data := issue.data)
             or issue_data.get("issue_type")
-            not in (STATE_CLASS_REMOVED_ISSUE, UNITS_CHANGED_ISSUE)
+            not in (
+                STATE_CLASS_REMOVED_ISSUE,
+                UNITS_CHANGED_ISSUE,
+                MEAN_TYPE_CHANGED_ISSUE,
+            )
         ):
             continue
         issues.add(issue.issue_id)
@@ -309,6 +309,10 @@
         }
     },
     "issues": {
+        "mean_type_changed": {
+            "title": "The mean type of {statistic_id} has changed",
+            "description": ""
+        },
         "state_class_removed": {
             "title": "{statistic_id} no longer has a state class",
             "description": ""
@@ -9,7 +9,11 @@ from typing import cast
 import tibber

 from homeassistant.components.recorder import get_instance
-from homeassistant.components.recorder.models import StatisticData, StatisticMetaData
+from homeassistant.components.recorder.models import (
+    StatisticData,
+    StatisticMeanType,
+    StatisticMetaData,
+)
 from homeassistant.components.recorder.statistics import (
     async_add_external_statistics,
     get_last_statistics,
@@ -159,7 +163,7 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]):
         )

         metadata = StatisticMetaData(
-            has_mean=False,
+            mean_type=StatisticMeanType.NONE,
             has_sum=True,
             name=f"{home.name} {sensor_type}",
             source=TIBBER_DOMAIN,
@@ -48,5 +48,15 @@
         'type': 'no_state',
       }),
     ]),
+    'sensor.statistics_issues_issue_5': list([
+      dict({
+        'data': dict({
+          'metadata_mean_type': 1,
+          'state_mean_type': 2,
+          'statistic_id': 'sensor.statistics_issues_issue_5',
+        }),
+        'type': 'mean_type_changed',
+      }),
+    ]),
   })
 # ---
@@ -29,6 +29,20 @@
       'last_updated': <ANY>,
       'state': '1500',
     }),
+    StateSnapshot({
+      'attributes': ReadOnlyDict({
+        'device_class': 'wind_direction',
+        'friendly_name': 'Statistics issues Issue 5',
+        'state_class': <SensorStateClass.MEASUREMENT_ANGLE: 'measurement_angle'>,
+        'unit_of_measurement': '°',
+      }),
+      'context': <ANY>,
+      'entity_id': 'sensor.statistics_issues_issue_5',
+      'last_changed': <ANY>,
+      'last_reported': <ANY>,
+      'last_updated': <ANY>,
+      'state': '100',
+    }),
     StateSnapshot({
       'attributes': ReadOnlyDict({
         'friendly_name': 'Statistics issues Issue 1',
@@ -99,6 +113,20 @@
       'last_updated': <ANY>,
       'state': '1500',
     }),
+    StateSnapshot({
+      'attributes': ReadOnlyDict({
+        'device_class': 'wind_direction',
+        'friendly_name': 'Statistics issues Issue 5',
+        'state_class': <SensorStateClass.MEASUREMENT_ANGLE: 'measurement_angle'>,
+        'unit_of_measurement': '°',
+      }),
+      'context': <ANY>,
+      'entity_id': 'sensor.statistics_issues_issue_5',
+      'last_changed': <ANY>,
+      'last_reported': <ANY>,
+      'last_updated': <ANY>,
+      'state': '100',
+    }),
     StateSnapshot({
       'attributes': ReadOnlyDict({
         'friendly_name': 'Sensor test',
@@ -11,6 +11,7 @@ import voluptuous as vol
 from homeassistant.components.kitchen_sink import DOMAIN
 from homeassistant.components.recorder import get_instance
 from homeassistant.components.recorder.statistics import (
+    StatisticMeanType,
     async_add_external_statistics,
     get_last_statistics,
     list_statistic_ids,
@@ -45,6 +46,7 @@ async def test_demo_statistics(hass: HomeAssistant) -> None:
     assert {
         "display_unit_of_measurement": "°C",
         "has_mean": True,
+        "mean_type": StatisticMeanType.ARITHMETIC,
         "has_sum": False,
         "name": "Outdoor temperature",
         "source": DOMAIN,
@@ -55,6 +57,7 @@ async def test_demo_statistics(hass: HomeAssistant) -> None:
     assert {
         "display_unit_of_measurement": "kWh",
         "has_mean": False,
+        "mean_type": StatisticMeanType.NONE,
         "has_sum": True,
         "name": "Energy consumption 1",
         "source": DOMAIN,
@@ -87,6 +87,7 @@ async def test_validate_db_schema_fix_float_issue(
         "created_ts DOUBLE PRECISION",
         "start_ts DOUBLE PRECISION",
         "mean DOUBLE PRECISION",
+        "mean_weight DOUBLE PRECISION",
         "min DOUBLE PRECISION",
         "max DOUBLE PRECISION",
         "last_reset_ts DOUBLE PRECISION",
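
The new mean_weight column is what makes circular means reaggregatable: an arithmetic mean of period means can be rebuilt from counts, but circular period means can only be combined if each period also carries a weight for its resultant vector (hence the _reduce_statistics_per_day/week/month(result, types, metadata) signature changes earlier in this commit). A hedged sketch of such a combination, assuming the stored weight behaves like the magnitude of the period's summed vectors — the actual reduction helpers are not part of this excerpt and may differ:

import math


def combine_circular_means(
    periods: list[tuple[float, float]],
) -> tuple[float, float]:
    """Fold per-period (mean_deg, mean_weight) pairs into a longer period.

    Returns the combined mean and the combined weight, so the result can be
    folded again (5-minute -> hourly -> daily -> monthly).
    """
    x = sum(w * math.cos(math.radians(m)) for m, w in periods)
    y = sum(w * math.sin(math.radians(m)) for m, w in periods)
    return math.degrees(math.atan2(y, x)) % 360, math.hypot(x, y)


# Two short-term periods pointing near north still combine to near north:
print(combine_circular_means([(358.6, 295.0), (5.0, 70.0)]))
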
@@ -35,7 +35,8 @@ from homeassistant.components.recorder.db_schema import (
     StatesMeta,
 )
 from homeassistant.components.recorder.tasks import RecorderTask, StatisticsTask
-from homeassistant.const import UnitOfTemperature
+from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass
+from homeassistant.const import DEGREE, UnitOfTemperature
 from homeassistant.core import Event, HomeAssistant, State
 from homeassistant.helpers import recorder as recorder_helper
 from homeassistant.util import dt as dt_util
@@ -290,6 +291,7 @@ def record_states(
     sns2 = "sensor.test2"
     sns3 = "sensor.test3"
     sns4 = "sensor.test4"
+    sns5 = "sensor.wind_direction"
     sns1_attr = {
         "device_class": "temperature",
         "state_class": "measurement",
@@ -302,6 +304,11 @@ def record_states(
     }
     sns3_attr = {"device_class": "temperature"}
     sns4_attr = {}
+    sns5_attr = {
+        "device_class": SensorDeviceClass.WIND_DIRECTION,
+        "state_class": SensorStateClass.MEASUREMENT_ANGLE,
+        "unit_of_measurement": DEGREE,
+    }

     def set_state(entity_id, state, **kwargs):
         """Set the state."""
@@ -315,7 +322,7 @@ def record_states(
     three = two + timedelta(seconds=30 * 5)
     four = three + timedelta(seconds=14 * 5)

-    states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: []}
+    states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: [], sns5: []}
     with freeze_time(one) as freezer:
         states[mp].append(
             set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)})
@@ -324,6 +331,7 @@ def record_states(
         states[sns2].append(set_state(sns2, "10", attributes=sns2_attr))
         states[sns3].append(set_state(sns3, "10", attributes=sns3_attr))
         states[sns4].append(set_state(sns4, "10", attributes=sns4_attr))
+        states[sns5].append(set_state(sns5, "10", attributes=sns5_attr))

         freezer.move_to(one + timedelta(microseconds=1))
         states[mp].append(
@@ -335,12 +343,14 @@ def record_states(
         states[sns2].append(set_state(sns2, "15", attributes=sns2_attr))
         states[sns3].append(set_state(sns3, "15", attributes=sns3_attr))
         states[sns4].append(set_state(sns4, "15", attributes=sns4_attr))
+        states[sns5].append(set_state(sns5, "350", attributes=sns5_attr))

         freezer.move_to(three)
         states[sns1].append(set_state(sns1, "20", attributes=sns1_attr))
         states[sns2].append(set_state(sns2, "20", attributes=sns2_attr))
         states[sns3].append(set_state(sns3, "20", attributes=sns3_attr))
         states[sns4].append(set_state(sns4, "20", attributes=sns4_attr))
+        states[sns5].append(set_state(sns5, "5", attributes=sns5_attr))

     return zero, four, states

@@ -583,6 +583,8 @@ class StatisticsBase:
     last_reset_ts = Column(TIMESTAMP_TYPE)
     state = Column(DOUBLE_TYPE)
     sum = Column(DOUBLE_TYPE)
+    # *** Not originally in v32, only added for tests. Added in v49
+    mean_weight = Column(DOUBLE_TYPE)

     @classmethod
     def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self:
@@ -1538,6 +1538,7 @@ async def test_stats_timestamp_conversion_is_reentrant(
         "last_reset_ts": one_year_ago.timestamp(),
         "max": None,
         "mean": None,
+        "mean_weight": None,
         "metadata_id": 1000,
         "min": None,
         "start": process_timestamp(one_year_ago).replace(tzinfo=None),
@@ -1553,6 +1554,7 @@ async def test_stats_timestamp_conversion_is_reentrant(
         "last_reset_ts": six_months_ago.timestamp(),
         "max": None,
         "mean": None,
+        "mean_weight": None,
         "metadata_id": 1000,
         "min": None,
         "start": None,
@@ -1568,6 +1570,7 @@ async def test_stats_timestamp_conversion_is_reentrant(
         "last_reset_ts": one_month_ago.timestamp(),
         "max": None,
         "mean": None,
+        "mean_weight": None,
         "metadata_id": 1000,
         "min": None,
         "start": process_timestamp(one_month_ago).replace(tzinfo=None),
@@ -1705,6 +1708,7 @@ async def test_stats_timestamp_with_one_by_one(
         "last_reset_ts": one_year_ago.timestamp(),
         "max": None,
         "mean": None,
+        "mean_weight": None,
         "metadata_id": 1000,
         "min": None,
         "start": None,
@@ -1720,6 +1724,7 @@ async def test_stats_timestamp_with_one_by_one(
         "last_reset_ts": six_months_ago.timestamp(),
         "max": None,
         "mean": None,
+        "mean_weight": None,
         "metadata_id": 1000,
         "min": None,
         "start": None,
@@ -1735,6 +1740,7 @@ async def test_stats_timestamp_with_one_by_one(
         "last_reset_ts": one_month_ago.timestamp(),
         "max": None,
         "mean": None,
+        "mean_weight": None,
         "metadata_id": 1000,
         "min": None,
         "start": None,
@@ -1758,6 +1764,7 @@ async def test_stats_timestamp_with_one_by_one(
         "last_reset_ts": one_year_ago.timestamp(),
         "max": None,
         "mean": None,
+        "mean_weight": None,
         "metadata_id": 1000,
         "min": None,
         "start": None,
@@ -1773,6 +1780,7 @@ async def test_stats_timestamp_with_one_by_one(
         "last_reset_ts": six_months_ago.timestamp(),
         "max": None,
         "mean": None,
+        "mean_weight": None,
         "metadata_id": 1000,
         "min": None,
         "start": None,
@@ -1788,6 +1796,7 @@ async def test_stats_timestamp_with_one_by_one(
         "last_reset_ts": one_month_ago.timestamp(),
         "max": None,
         "mean": None,
+        "mean_weight": None,
         "metadata_id": 1000,
         "min": None,
         "start": None,
@@ -1932,6 +1941,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates(
         "last_reset_ts": one_year_ago.timestamp(),
         "max": None,
         "mean": None,
+        "mean_weight": None,
         "metadata_id": 1000,
         "min": None,
         "start": None,
@@ -1947,6 +1957,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates(
         "last_reset_ts": six_months_ago.timestamp(),
         "max": None,
         "mean": None,
+        "mean_weight": None,
         "metadata_id": 1000,
         "min": None,
         "start": None,
@@ -1962,6 +1973,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates(
         "last_reset_ts": one_month_ago.timestamp(),
         "max": None,
         "mean": None,
+        "mean_weight": None,
         "metadata_id": 1000,
         "min": None,
         "start": None,
@@ -1985,6 +1997,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates(
         "last_reset_ts": six_months_ago.timestamp(),
         "max": None,
         "mean": None,
+        "mean_weight": None,
         "metadata_id": 1000,
         "min": None,
         "start": None,
@ -12,6 +12,7 @@ from homeassistant.components import recorder
|
|||||||
from homeassistant.components.recorder import Recorder, history, statistics
|
from homeassistant.components.recorder import Recorder, history, statistics
|
||||||
from homeassistant.components.recorder.db_schema import StatisticsShortTerm
|
from homeassistant.components.recorder.db_schema import StatisticsShortTerm
|
||||||
from homeassistant.components.recorder.models import (
|
from homeassistant.components.recorder.models import (
|
||||||
|
StatisticMeanType,
|
||||||
datetime_to_timestamp_or_none,
|
datetime_to_timestamp_or_none,
|
||||||
process_timestamp,
|
process_timestamp,
|
||||||
)
|
)
|
||||||
@ -123,32 +124,38 @@ async def test_compile_hourly_statistics(
|
|||||||
stats = get_latest_short_term_statistics_with_session(
|
stats = get_latest_short_term_statistics_with_session(
|
||||||
hass,
|
hass,
|
||||||
session,
|
session,
|
||||||
{"sensor.test1"},
|
{"sensor.test1", "sensor.wind_direction"},
|
||||||
{"last_reset", "max", "mean", "min", "state", "sum"},
|
{"last_reset", "max", "mean", "min", "state", "sum"},
|
||||||
)
|
)
|
||||||
assert stats == {}
|
assert stats == {}
|
||||||
|
|
||||||
for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
|
for kwargs in ({}, {"statistic_ids": ["sensor.test1", "sensor.wind_direction"]}):
|
||||||
stats = statistics_during_period(hass, zero, period="5minute", **kwargs)
|
stats = statistics_during_period(hass, zero, period="5minute", **kwargs)
|
||||||
assert stats == {}
|
assert stats == {}
|
||||||
stats = get_last_short_term_statistics(
|
for sensor in ("sensor.test1", "sensor.wind_direction"):
|
||||||
hass,
|
stats = get_last_short_term_statistics(
|
||||||
0,
|
hass,
|
||||||
"sensor.test1",
|
0,
|
||||||
True,
|
sensor,
|
||||||
{"last_reset", "max", "mean", "min", "state", "sum"},
|
True,
|
||||||
)
|
{"last_reset", "max", "mean", "min", "state", "sum"},
|
||||||
assert stats == {}
|
)
|
||||||
|
assert stats == {}
|
||||||
|
|
||||||
do_adhoc_statistics(hass, start=zero)
|
do_adhoc_statistics(hass, start=zero)
|
||||||
do_adhoc_statistics(hass, start=four)
|
do_adhoc_statistics(hass, start=four)
|
||||||
await async_wait_recording_done(hass)
|
await async_wait_recording_done(hass)
|
||||||
|
|
||||||
metadata = get_metadata(hass, statistic_ids={"sensor.test1", "sensor.test2"})
|
metadata = get_metadata(
|
||||||
assert metadata["sensor.test1"][1]["has_mean"] is True
|
hass, statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"}
|
||||||
assert metadata["sensor.test1"][1]["has_sum"] is False
|
)
|
||||||
assert metadata["sensor.test2"][1]["has_mean"] is True
|
for sensor, mean_type in (
|
||||||
assert metadata["sensor.test2"][1]["has_sum"] is False
|
("sensor.test1", StatisticMeanType.ARITHMETIC),
|
||||||
|
("sensor.test2", StatisticMeanType.ARITHMETIC),
|
||||||
|
("sensor.wind_direction", StatisticMeanType.CIRCULAR),
|
||||||
|
):
|
||||||
|
assert metadata[sensor][1]["mean_type"] is mean_type
|
||||||
|
assert metadata[sensor][1]["has_sum"] is False
|
||||||
expected_1 = {
|
expected_1 = {
|
||||||
"start": process_timestamp(zero).timestamp(),
|
"start": process_timestamp(zero).timestamp(),
|
||||||
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
|
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
|
||||||
@ -168,11 +175,39 @@ async def test_compile_hourly_statistics(
|
|||||||
expected_stats1 = [expected_1, expected_2]
|
expected_stats1 = [expected_1, expected_2]
|
||||||
expected_stats2 = [expected_1, expected_2]
|
expected_stats2 = [expected_1, expected_2]
|
||||||
|
|
||||||
|
expected_stats_wind_direction1 = {
|
||||||
|
"start": process_timestamp(zero).timestamp(),
|
||||||
|
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
|
||||||
|
"mean": pytest.approx(358.6387003873801),
|
||||||
|
"min": None,
|
||||||
|
"max": None,
|
||||||
|
"last_reset": None,
|
||||||
|
}
|
||||||
|
expected_stats_wind_direction2 = {
|
||||||
|
"start": process_timestamp(four).timestamp(),
|
||||||
|
"end": process_timestamp(four + timedelta(minutes=5)).timestamp(),
|
||||||
|
"mean": pytest.approx(5),
|
||||||
|
"min": None,
|
||||||
|
"max": None,
|
||||||
|
"last_reset": None,
|
||||||
|
}
|
||||||
|
expected_stats_wind_direction = [
|
||||||
|
expected_stats_wind_direction1,
|
||||||
|
expected_stats_wind_direction2,
|
||||||
|
]
|
||||||
|
|
||||||
# Test statistics_during_period
|
# Test statistics_during_period
|
||||||
stats = statistics_during_period(
|
stats = statistics_during_period(
|
||||||
hass, zero, period="5minute", statistic_ids={"sensor.test1", "sensor.test2"}
|
hass,
|
||||||
|
zero,
|
||||||
|
period="5minute",
|
||||||
|
statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"},
|
||||||
)
|
)
|
||||||
assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
|
assert stats == {
|
||||||
|
"sensor.test1": expected_stats1,
|
||||||
|
"sensor.test2": expected_stats2,
|
||||||
|
"sensor.wind_direction": expected_stats_wind_direction,
|
||||||
|
}
|
||||||
|
|
||||||
# Test statistics_during_period with a far future start and end date
|
# Test statistics_during_period with a far future start and end date
|
||||||
future = dt_util.as_utc(dt_util.parse_datetime("2221-11-01 00:00:00"))
|
future = dt_util.as_utc(dt_util.parse_datetime("2221-11-01 00:00:00"))
|
||||||
@ -181,7 +216,7 @@ async def test_compile_hourly_statistics(
|
|||||||
future,
|
future,
|
||||||
end_time=future,
|
end_time=future,
|
||||||
period="5minute",
|
period="5minute",
|
||||||
statistic_ids={"sensor.test1", "sensor.test2"},
|
statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"},
|
||||||
)
|
)
|
||||||
assert stats == {}
|
assert stats == {}
|
||||||
|
|
||||||
@ -191,9 +226,13 @@ async def test_compile_hourly_statistics(
|
|||||||
zero,
|
zero,
|
||||||
end_time=future,
|
end_time=future,
|
||||||
period="5minute",
|
period="5minute",
|
||||||
statistic_ids={"sensor.test1", "sensor.test2"},
|
statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"},
|
||||||
)
|
)
|
||||||
assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
|
assert stats == {
|
||||||
|
"sensor.test1": expected_stats1,
|
||||||
|
"sensor.test2": expected_stats2,
|
||||||
|
"sensor.wind_direction": expected_stats_wind_direction,
|
||||||
|
}
|
||||||
|
|
||||||
stats = statistics_during_period(
|
stats = statistics_during_period(
|
||||||
hass, zero, statistic_ids={"sensor.test2"}, period="5minute"
|
hass, zero, statistic_ids={"sensor.test2"}, period="5minute"
|
||||||
@ -206,32 +245,39 @@ async def test_compile_hourly_statistics(
|
|||||||
assert stats == {}
|
assert stats == {}
|
||||||
|
|
||||||
# Test get_last_short_term_statistics and get_latest_short_term_statistics
|
# Test get_last_short_term_statistics and get_latest_short_term_statistics
|
||||||
stats = get_last_short_term_statistics(
|
for sensor, expected in (
|
||||||
hass,
|
("sensor.test1", expected_2),
|
||||||
0,
|
("sensor.wind_direction", expected_stats_wind_direction2),
|
||||||
"sensor.test1",
|
):
|
||||||
True,
|
stats = get_last_short_term_statistics(
|
||||||
{"last_reset", "max", "mean", "min", "state", "sum"},
|
hass,
|
||||||
)
|
0,
|
||||||
assert stats == {}
|
sensor,
|
||||||
|
True,
|
||||||
|
{"last_reset", "max", "mean", "min", "state", "sum"},
|
||||||
|
)
|
||||||
|
assert stats == {}
|
||||||
|
|
||||||
stats = get_last_short_term_statistics(
|
stats = get_last_short_term_statistics(
|
||||||
hass,
|
hass,
|
||||||
1,
|
1,
|
||||||
"sensor.test1",
|
sensor,
|
||||||
True,
|
True,
|
||||||
{"last_reset", "max", "mean", "min", "state", "sum"},
|
{"last_reset", "max", "mean", "min", "state", "sum"},
|
||||||
)
|
)
|
||||||
assert stats == {"sensor.test1": [expected_2]}
|
assert stats == {sensor: [expected]}
|
||||||
|
|
||||||
with session_scope(hass=hass, read_only=True) as session:
|
with session_scope(hass=hass, read_only=True) as session:
|
||||||
stats = get_latest_short_term_statistics_with_session(
|
stats = get_latest_short_term_statistics_with_session(
|
||||||
hass,
|
hass,
|
||||||
session,
|
session,
|
||||||
{"sensor.test1"},
|
{"sensor.test1", "sensor.wind_direction"},
|
||||||
{"last_reset", "max", "mean", "min", "state", "sum"},
|
{"last_reset", "max", "mean", "min", "state", "sum"},
|
||||||
)
|
)
|
||||||
assert stats == {"sensor.test1": [expected_2]}
|
assert stats == {
|
||||||
|
"sensor.test1": [expected_2],
|
||||||
|
"sensor.wind_direction": [expected_stats_wind_direction2],
|
||||||
|
}
|
||||||
|
|
||||||
# Now wipe the latest_short_term_statistics_ids table and test again
|
# Now wipe the latest_short_term_statistics_ids table and test again
|
||||||
# to make sure we can rebuild the missing data
|
# to make sure we can rebuild the missing data
|
||||||
@ -241,13 +287,15 @@ async def test_compile_hourly_statistics(
|
|||||||
stats = get_latest_short_term_statistics_with_session(
|
stats = get_latest_short_term_statistics_with_session(
|
||||||
hass,
|
hass,
|
||||||
session,
|
session,
|
||||||
{"sensor.test1"},
|
{"sensor.test1", "sensor.wind_direction"},
|
||||||
{"last_reset", "max", "mean", "min", "state", "sum"},
|
{"last_reset", "max", "mean", "min", "state", "sum"},
|
||||||
)
|
)
|
||||||
assert stats == {"sensor.test1": [expected_2]}
|
assert stats == {
|
||||||
|
"sensor.test1": [expected_2],
|
||||||
|
"sensor.wind_direction": [expected_stats_wind_direction2],
|
||||||
|
}
|
||||||
|
|
||||||
metadata = get_metadata(hass, statistic_ids={"sensor.test1"})
|
metadata = get_metadata(hass, statistic_ids={"sensor.test1"})
|
||||||
|
|
||||||
with session_scope(hass=hass, read_only=True) as session:
|
with session_scope(hass=hass, read_only=True) as session:
|
||||||
stats = get_latest_short_term_statistics_with_session(
|
stats = get_latest_short_term_statistics_with_session(
|
||||||
hass,
|
hass,
|
||||||
@ -258,23 +306,44 @@ async def test_compile_hourly_statistics(
|
|||||||
)
|
)
|
||||||
assert stats == {"sensor.test1": [expected_2]}
|
assert stats == {"sensor.test1": [expected_2]}
|
||||||
|
|
||||||
stats = get_last_short_term_statistics(
|
# Test with multiple metadata ids
|
||||||
hass,
|
metadata = get_metadata(
|
||||||
2,
|
hass, statistic_ids={"sensor.test1", "sensor.wind_direction"}
|
||||||
"sensor.test1",
|
|
||||||
True,
|
|
||||||
{"last_reset", "max", "mean", "min", "state", "sum"},
|
|
||||||
)
|
)
|
||||||
assert stats == {"sensor.test1": expected_stats1[::-1]}
|
with session_scope(hass=hass, read_only=True) as session:
|
||||||
|
stats = get_latest_short_term_statistics_with_session(
|
||||||
|
hass,
|
||||||
|
session,
|
||||||
|
{"sensor.test1", "sensor.wind_direction"},
|
||||||
|
{"last_reset", "max", "mean", "min", "state", "sum"},
|
||||||
|
metadata=metadata,
|
||||||
|
)
|
||||||
|
assert stats == {
|
||||||
|
"sensor.test1": [expected_2],
|
||||||
|
"sensor.wind_direction": [expected_stats_wind_direction2],
|
||||||
|
}
|
||||||
|
|
||||||
stats = get_last_short_term_statistics(
|
for sensor, expected in (
|
||||||
hass,
|
("sensor.test1", expected_stats1[::-1]),
|
||||||
3,
|
("sensor.wind_direction", expected_stats_wind_direction[::-1]),
|
||||||
"sensor.test1",
|
):
|
||||||
True,
|
stats = get_last_short_term_statistics(
|
||||||
{"last_reset", "max", "mean", "min", "state", "sum"},
|
hass,
|
||||||
)
|
2,
|
||||||
assert stats == {"sensor.test1": expected_stats1[::-1]}
|
sensor,
|
||||||
|
True,
|
||||||
|
{"last_reset", "max", "mean", "min", "state", "sum"},
|
||||||
|
)
|
||||||
|
assert stats == {sensor: expected}
|
||||||
|
|
||||||
|
stats = get_last_short_term_statistics(
|
||||||
|
hass,
|
||||||
|
3,
|
||||||
|
sensor,
|
||||||
|
True,
|
||||||
|
{"last_reset", "max", "mean", "min", "state", "sum"},
|
||||||
|
)
|
||||||
|
assert stats == {sensor: expected}
|
||||||
|
|
||||||
stats = get_last_short_term_statistics(
|
stats = get_last_short_term_statistics(
|
||||||
hass,
|
hass,
|
||||||
@ -291,7 +360,7 @@ async def test_compile_hourly_statistics(
|
|||||||
stats = get_latest_short_term_statistics_with_session(
|
stats = get_latest_short_term_statistics_with_session(
|
||||||
hass,
|
hass,
|
||||||
session,
|
session,
|
||||||
{"sensor.test1"},
|
{"sensor.test1", "sensor.wind_direction"},
|
||||||
{"last_reset", "max", "mean", "min", "state", "sum"},
|
{"last_reset", "max", "mean", "min", "state", "sum"},
|
||||||
)
|
)
|
||||||
assert stats == {}
|
assert stats == {}
|
||||||
@ -306,7 +375,7 @@ async def test_compile_hourly_statistics(
|
|||||||
stats = get_latest_short_term_statistics_with_session(
|
stats = get_latest_short_term_statistics_with_session(
|
||||||
hass,
|
hass,
|
||||||
session,
|
session,
|
||||||
{"sensor.test1"},
|
{"sensor.test1", "sensor.wind_direction"},
|
||||||
{"last_reset", "max", "mean", "min", "state", "sum"},
|
{"last_reset", "max", "mean", "min", "state", "sum"},
|
||||||
)
|
)
|
||||||
assert stats == {}
|
assert stats == {}
|
||||||
@ -460,15 +529,35 @@ async def test_rename_entity(
|
|||||||
expected_stats1 = [expected_1]
|
expected_stats1 = [expected_1]
|
||||||
expected_stats2 = [expected_1]
|
expected_stats2 = [expected_1]
|
||||||
expected_stats99 = [expected_1]
|
expected_stats99 = [expected_1]
|
||||||
|
expected_stats_wind_direction = [
|
||||||
|
{
|
||||||
|
"start": process_timestamp(zero).timestamp(),
|
||||||
|
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
|
||||||
|
"mean": pytest.approx(358.6387003873801),
|
||||||
|
"min": None,
|
||||||
|
"max": None,
|
||||||
|
"last_reset": None,
|
||||||
|
"state": None,
|
||||||
|
"sum": None,
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
stats = statistics_during_period(hass, zero, period="5minute")
|
stats = statistics_during_period(hass, zero, period="5minute")
|
||||||
assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
|
assert stats == {
|
||||||
|
"sensor.test1": expected_stats1,
|
||||||
|
"sensor.test2": expected_stats2,
|
||||||
|
"sensor.wind_direction": expected_stats_wind_direction,
|
||||||
|
}
|
||||||
|
|
||||||
entity_registry.async_update_entity("sensor.test1", new_entity_id="sensor.test99")
|
entity_registry.async_update_entity("sensor.test1", new_entity_id="sensor.test99")
|
||||||
await async_wait_recording_done(hass)
|
await async_wait_recording_done(hass)
|
||||||
|
|
||||||
stats = statistics_during_period(hass, zero, period="5minute")
|
stats = statistics_during_period(hass, zero, period="5minute")
|
||||||
assert stats == {"sensor.test99": expected_stats99, "sensor.test2": expected_stats2}
|
assert stats == {
|
||||||
|
"sensor.test99": expected_stats99,
|
||||||
|
"sensor.test2": expected_stats2,
|
||||||
|
"sensor.wind_direction": expected_stats_wind_direction,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
async def test_statistics_during_period_set_back_compat(
|
async def test_statistics_during_period_set_back_compat(
|
||||||
@ -544,9 +633,25 @@ async def test_rename_entity_collision(
|
|||||||
}
|
}
|
||||||
expected_stats1 = [expected_1]
|
expected_stats1 = [expected_1]
|
||||||
expected_stats2 = [expected_1]
|
expected_stats2 = [expected_1]
|
||||||
|
expected_stats_wind_direction = [
|
||||||
|
{
|
||||||
|
"start": process_timestamp(zero).timestamp(),
|
||||||
|
"end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
|
||||||
|
"mean": pytest.approx(358.6387003873801),
|
||||||
|
"min": None,
|
||||||
|
"max": None,
|
||||||
|
"last_reset": None,
|
||||||
|
"state": None,
|
||||||
|
"sum": None,
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
stats = statistics_during_period(hass, zero, period="5minute")
|
stats = statistics_during_period(hass, zero, period="5minute")
|
||||||
assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
|
assert stats == {
|
||||||
|
"sensor.test1": expected_stats1,
|
||||||
|
"sensor.test2": expected_stats2,
|
||||||
|
"sensor.wind_direction": expected_stats_wind_direction,
|
||||||
|
}
|
||||||
|
|
||||||
# Insert metadata for sensor.test99
|
# Insert metadata for sensor.test99
|
||||||
metadata_1 = {
|
metadata_1 = {
|
||||||
@@ -567,7 +672,11 @@ async def test_rename_entity_collision(

     # Statistics failed to migrate due to the collision
     stats = statistics_during_period(hass, zero, period="5minute")
-    assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
+    assert stats == {
+        "sensor.test1": expected_stats1,
+        "sensor.test2": expected_stats2,
+        "sensor.wind_direction": expected_stats_wind_direction,
+    }

     # Verify the safeguard in the states meta manager was hit
     assert (
@@ -631,9 +740,25 @@ async def test_rename_entity_collision_states_meta_check_disabled(
     }
     expected_stats1 = [expected_1]
     expected_stats2 = [expected_1]
+    expected_stats_wind_direction = [
+        {
+            "start": process_timestamp(zero).timestamp(),
+            "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
+            "mean": pytest.approx(358.6387003873801),
+            "min": None,
+            "max": None,
+            "last_reset": None,
+            "state": None,
+            "sum": None,
+        }
+    ]

     stats = statistics_during_period(hass, zero, period="5minute")
-    assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
+    assert stats == {
+        "sensor.test1": expected_stats1,
+        "sensor.test2": expected_stats2,
+        "sensor.wind_direction": expected_stats_wind_direction,
+    }

     # Insert metadata for sensor.test99
     metadata_1 = {
@@ -660,7 +785,11 @@ async def test_rename_entity_collision_states_meta_check_disabled(

     # Statistics failed to migrate due to the collision
     stats = statistics_during_period(hass, zero, period="5minute")
-    assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
+    assert stats == {
+        "sensor.test1": expected_stats1,
+        "sensor.test2": expected_stats2,
+        "sensor.wind_direction": expected_stats_wind_direction,
+    }

     # Verify the filter_unique_constraint_integrity_error safeguard was hit
     assert "Blocked attempt to insert duplicated statistic rows" in caplog.text
@@ -786,6 +915,7 @@ async def test_import_statistics(
         {
             "display_unit_of_measurement": "kWh",
             "has_mean": False,
+            "mean_type": StatisticMeanType.NONE,
             "has_sum": True,
             "statistic_id": statistic_id,
             "name": "Total imported energy",
@@ -800,6 +930,7 @@ async def test_import_statistics(
         1,
         {
             "has_mean": False,
+            "mean_type": StatisticMeanType.NONE,
             "has_sum": True,
             "name": "Total imported energy",
             "source": source,
@@ -876,6 +1007,7 @@ async def test_import_statistics(
         {
             "display_unit_of_measurement": "kWh",
             "has_mean": False,
+            "mean_type": StatisticMeanType.NONE,
             "has_sum": True,
             "statistic_id": statistic_id,
             "name": "Total imported energy renamed",
@@ -890,6 +1022,7 @@ async def test_import_statistics(
         1,
         {
             "has_mean": False,
+            "mean_type": StatisticMeanType.NONE,
             "has_sum": True,
             "name": "Total imported energy renamed",
             "source": source,

@@ -1,11 +1,14 @@
 """The tests for sensor recorder platform."""

+from collections.abc import Iterable
 import datetime
 from datetime import timedelta
+import math
 from statistics import fmean
 import sys
 from unittest.mock import ANY, patch

+from _pytest.python_api import ApproxBase
 from freezegun import freeze_time
 from freezegun.api import FrozenDateTimeFactory
 import pytest
@@ -13,7 +16,14 @@ import pytest
 from homeassistant.components import recorder
 from homeassistant.components.recorder import Recorder
 from homeassistant.components.recorder.db_schema import Statistics, StatisticsShortTerm
+from homeassistant.components.recorder.models import (
+    StatisticData,
+    StatisticMeanType,
+    StatisticMetaData,
+)
 from homeassistant.components.recorder.statistics import (
+    DEG_TO_RAD,
+    RAD_TO_DEG,
     async_add_external_statistics,
     get_last_statistics,
     get_latest_short_term_statistics_with_session,
@@ -24,6 +34,7 @@ from homeassistant.components.recorder.statistics import (
 from homeassistant.components.recorder.util import session_scope
 from homeassistant.components.recorder.websocket_api import UNIT_SCHEMA
 from homeassistant.components.sensor import UNIT_CONVERTERS
+from homeassistant.const import DEGREE
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import recorder as recorder_helper
 from homeassistant.setup import async_setup_component
@@ -247,12 +258,12 @@ async def test_statistics_during_period(


 @pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC))
+@pytest.mark.usefixtures("recorder_mock")
 @pytest.mark.parametrize("offset", [0, 1, 2])
 async def test_statistic_during_period(
-    recorder_mock: Recorder,
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
-    offset,
+    offset: int,
 ) -> None:
     """Test statistic_during_period."""
     now = dt_util.utcnow()
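The signature change above swaps an unused recorder_mock parameter for pytest's usefixtures marker, which requests the fixture purely for its side effect (a running recorder) without binding its value. The same pattern in isolation:

@pytest.mark.usefixtures("recorder_mock")
async def test_example(hass: HomeAssistant) -> None:
    """The fixture is set up and torn down, but its value is never needed."""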
@@ -307,7 +318,7 @@ async def test_statistic_during_period(
     )

     imported_metadata = {
-        "has_mean": False,
+        "has_mean": True,
         "has_sum": True,
         "name": "Total imported energy",
         "source": "recorder",
@@ -655,7 +666,7 @@ async def test_statistic_during_period(
             hass,
             session,
             {"sensor.test"},
-            {"last_reset", "max", "mean", "min", "state", "sum"},
+            {"last_reset", "state", "sum"},
         )
     start = imported_stats_5min[-1]["start"].timestamp()
     end = start + (5 * 60)
@@ -672,18 +683,376 @@ async def test_statistic_during_period(
     }


+def _circular_mean(values: Iterable[StatisticData]) -> dict[str, float]:
+    sin_sum = 0
+    cos_sum = 0
+    for x in values:
+        mean = x.get("mean")
+        assert mean is not None
+        sin_sum += math.sin(mean * DEG_TO_RAD)
+        cos_sum += math.cos(mean * DEG_TO_RAD)
+
+    return {
+        "mean": (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360,
+        "mean_weight": math.sqrt(sin_sum**2 + cos_sum**2),
+    }
+
+
+def _circular_mean_approx(values: Iterable[StatisticData]) -> ApproxBase:
+    return pytest.approx(_circular_mean(values)["mean"])
+
+
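The helper above is the standard circular-mean construction: each angle is mapped to a unit vector, the vectors are summed, and atan2 of the summed components gives the mean direction, normalised to [0, 360). The "mean_weight" entry is the length of that summed (resultant) vector. A self-contained sketch of why this matters for angles straddling north, assuming DEG_TO_RAD and RAD_TO_DEG are the usual pi/180 conversion factors:

import math

DEG_TO_RAD = math.pi / 180  # assumed to match the recorder's constants
RAD_TO_DEG = 180 / math.pi


def circular_mean(angles_deg: list[float]) -> float:
    """Circular mean of angles in degrees, normalised to [0, 360)."""
    sin_sum = sum(math.sin(a * DEG_TO_RAD) for a in angles_deg)
    cos_sum = sum(math.cos(a * DEG_TO_RAD) for a in angles_deg)
    return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360


mean = circular_mean([350.0, 10.0])
# The arithmetic mean would report 180.0 (due south); the circular mean
# is 0.0 (north), up to floating point rounding at the 0/360 wrap.
assert min(mean, 360.0 - mean) < 1e-9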
@pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC))
|
||||||
|
@pytest.mark.usefixtures("recorder_mock")
|
||||||
|
@pytest.mark.parametrize("offset", [0, 1, 2])
|
||||||
|
async def test_statistic_during_period_circular_mean(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
hass_ws_client: WebSocketGenerator,
|
||||||
|
offset: int,
|
||||||
|
) -> None:
|
||||||
|
"""Test statistic_during_period."""
|
||||||
|
now = dt_util.utcnow()
|
||||||
|
|
||||||
|
await async_recorder_block_till_done(hass)
|
||||||
|
client = await hass_ws_client()
|
||||||
|
|
||||||
|
zero = now
|
||||||
|
start = zero.replace(minute=offset * 5, second=0, microsecond=0) + timedelta(
|
||||||
|
hours=-3
|
||||||
|
)
|
||||||
|
|
||||||
|
imported_stats_5min: list[StatisticData] = [
|
||||||
|
{
|
||||||
|
"start": (start + timedelta(minutes=5 * i)),
|
||||||
|
"mean": (123.456 * i) % 360,
|
||||||
|
"mean_weight": 1,
|
||||||
|
}
|
||||||
|
for i in range(39)
|
||||||
|
]
|
||||||
|
|
||||||
|
imported_stats = []
|
||||||
|
slice_end = 12 - offset
|
||||||
|
imported_stats.append(
|
||||||
|
{
|
||||||
|
"start": imported_stats_5min[0]["start"].replace(minute=0),
|
||||||
|
**_circular_mean(imported_stats_5min[0:slice_end]),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
for i in range(2):
|
||||||
|
slice_start = i * 12 + (12 - offset)
|
||||||
|
slice_end = (i + 1) * 12 + (12 - offset)
|
||||||
|
assert imported_stats_5min[slice_start]["start"].minute == 0
|
||||||
|
imported_stats.append(
|
||||||
|
{
|
||||||
|
"start": imported_stats_5min[slice_start]["start"],
|
||||||
|
**_circular_mean(imported_stats_5min[slice_start:slice_end]),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
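Storing "mean_weight": 1 on each 5-minute row is what makes the hourly reduction above exact: when pre-aggregated circular means are combined, each chunk must contribute its resultant-vector length as a weight, not just its angle. A sketch of such a weighted reduction (the recorder's actual reduction code is not part of this diff):

def combine_circular_means(chunks: list[dict[str, float]]) -> dict[str, float]:
    """Combine {"mean", "mean_weight"} chunks into one circular mean."""
    sin_sum = sum(c["mean_weight"] * math.sin(c["mean"] * DEG_TO_RAD) for c in chunks)
    cos_sum = sum(c["mean_weight"] * math.cos(c["mean"] * DEG_TO_RAD) for c in chunks)
    return {
        "mean": (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360,
        "mean_weight": math.sqrt(sin_sum**2 + cos_sum**2),
    }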
+    imported_metadata: StatisticMetaData = {
+        "mean_type": StatisticMeanType.CIRCULAR,
+        "has_sum": False,
+        "name": "Wind direction",
+        "source": "recorder",
+        "statistic_id": "sensor.test",
+        "unit_of_measurement": DEGREE,
+    }
+
+    recorder.get_instance(hass).async_import_statistics(
+        imported_metadata,
+        imported_stats,
+        Statistics,
+    )
+    recorder.get_instance(hass).async_import_statistics(
+        imported_metadata,
+        imported_stats_5min,
+        StatisticsShortTerm,
+    )
+    await async_wait_recording_done(hass)
+
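The import above goes through the recorder-internal path (source "recorder", an entity-style statistic_id, and an explicit target table). For an external statistic, the same circular metadata would presumably be registered through async_add_external_statistics with a domain-prefixed ID instead; a hedged sketch:

external_metadata: StatisticMetaData = {
    "mean_type": StatisticMeanType.CIRCULAR,
    "has_sum": False,
    "name": "Wind direction",
    "source": "test",  # must match the domain prefix of the statistic_id
    "statistic_id": "test:wind_direction",
    "unit_of_measurement": DEGREE,
}
async_add_external_statistics(hass, external_metadata, imported_stats)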
+    metadata = get_metadata(hass, statistic_ids={"sensor.test"})
+    metadata_id = metadata["sensor.test"][0]
+    run_cache = get_short_term_statistics_run_cache(hass)
+    # Verify the import of the short term statistics
+    # also updates the run cache
+    assert run_cache.get_latest_ids({metadata_id}) is not None
+
+    # No data for this period yet
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "fixed_period": {
+                "start_time": now.isoformat(),
+                "end_time": now.isoformat(),
+            },
+            "statistic_id": "sensor.test",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "max": None,
+        "mean": None,
+        "min": None,
+        "change": None,
+    }
+
+    # This should include imported_statistics_5min[:]
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "statistic_id": "sensor.test",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats_5min),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+    # This should also include imported_statistics_5min[:]
+    start_time = (
+        dt_util.parse_datetime("2022-10-21T04:00:00+00:00")
+        + timedelta(minutes=5 * offset)
+    ).isoformat()
+    end_time = (
+        dt_util.parse_datetime("2022-10-21T07:15:00+00:00")
+        + timedelta(minutes=5 * offset)
+    ).isoformat()
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "statistic_id": "sensor.test",
+            "fixed_period": {
+                "start_time": start_time,
+                "end_time": end_time,
+            },
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats_5min),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+    # This should also include imported_statistics_5min[:]
+    start_time = (
+        dt_util.parse_datetime("2022-10-21T04:00:00+00:00")
+        + timedelta(minutes=5 * offset)
+    ).isoformat()
+    end_time = (
+        dt_util.parse_datetime("2022-10-21T08:20:00+00:00")
+        + timedelta(minutes=5 * offset)
+    ).isoformat()
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "statistic_id": "sensor.test",
+            "fixed_period": {
+                "start_time": start_time,
+                "end_time": end_time,
+            },
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats_5min),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+    # This should include imported_statistics_5min[26:]
+    start_time = (
+        dt_util.parse_datetime("2022-10-21T06:10:00+00:00")
+        + timedelta(minutes=5 * offset)
+    ).isoformat()
+    assert imported_stats_5min[26]["start"].isoformat() == start_time
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "fixed_period": {
+                "start_time": start_time,
+            },
+            "statistic_id": "sensor.test",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats_5min[26:]),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+    # This should also include imported_statistics_5min[26:]
+    start_time = (
+        dt_util.parse_datetime("2022-10-21T06:09:00+00:00")
+        + timedelta(minutes=5 * offset)
+    ).isoformat()
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "fixed_period": {
+                "start_time": start_time,
+            },
+            "statistic_id": "sensor.test",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats_5min[26:]),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+    # This should include imported_statistics_5min[:26]
+    end_time = (
+        dt_util.parse_datetime("2022-10-21T06:10:00+00:00")
+        + timedelta(minutes=5 * offset)
+    ).isoformat()
+    assert imported_stats_5min[26]["start"].isoformat() == end_time
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "fixed_period": {
+                "end_time": end_time,
+            },
+            "statistic_id": "sensor.test",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats_5min[:26]),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+    # This should include imported_statistics_5min[26:32] (less than a full hour)
+    start_time = (
+        dt_util.parse_datetime("2022-10-21T06:10:00+00:00")
+        + timedelta(minutes=5 * offset)
+    ).isoformat()
+    assert imported_stats_5min[26]["start"].isoformat() == start_time
+    end_time = (
+        dt_util.parse_datetime("2022-10-21T06:40:00+00:00")
+        + timedelta(minutes=5 * offset)
+    ).isoformat()
+    assert imported_stats_5min[32]["start"].isoformat() == end_time
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "fixed_period": {
+                "start_time": start_time,
+                "end_time": end_time,
+            },
+            "statistic_id": "sensor.test",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats_5min[26:32]),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+    # This should include imported_statistics[2:] + imported_statistics_5min[36:]
+    start_time = "2022-10-21T06:00:00+00:00"
+    assert imported_stats_5min[24 - offset]["start"].isoformat() == start_time
+    assert imported_stats[2]["start"].isoformat() == start_time
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "fixed_period": {
+                "start_time": start_time,
+            },
+            "statistic_id": "sensor.test",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats_5min[24 - offset :]),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+    # This should also include imported_statistics[2:] + imported_statistics_5min[36:]
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "rolling_window": {
+                "duration": {"hours": 1, "minutes": 25},
+            },
+            "statistic_id": "sensor.test",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats_5min[24 - offset :]),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+    # This should include imported_statistics[2:3]
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "rolling_window": {
+                "duration": {"hours": 1},
+                "offset": {"minutes": -25},
+            },
+            "statistic_id": "sensor.test",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    slice_start = 24 - offset
+    slice_end = 36 - offset
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats_5min[slice_start:slice_end]),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+    # Test we can get only selected types
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "statistic_id": "sensor.test",
+            "types": ["mean"],
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats_5min),
+    }
+
+
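The queries above exercise the period selectors of the recorder/statistic_during_period command: no period at all (everything on record), fixed_period with optional start_time and end_time, and rolling_window with a duration plus optional offset, along with the types filter that projects the result down to selected aggregates. Condensed into one hedged sketch of the message shape (send exactly one period selector at a time):

await client.send_json_auto_id(
    {
        "type": "recorder/statistic_during_period",
        "statistic_id": "sensor.test",
        # one of:
        "fixed_period": {"start_time": start_time, "end_time": end_time},
        # "rolling_window": {"duration": {"hours": 1}, "offset": {"minutes": -25}},
        "types": ["mean"],  # optional; omit to get mean/min/max/change
    }
)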
 @pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC))
 async def test_statistic_during_period_hole(
     recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator
 ) -> None:
     """Test statistic_during_period when there are holes in the data."""
-    stat_id = 1
-
-    def next_id():
-        nonlocal stat_id
-        stat_id += 1
-        return stat_id
-
     now = dt_util.utcnow()

     await async_recorder_block_till_done(hass)
@@ -704,7 +1073,7 @@ async def test_statistic_during_period_hole(
     ]

     imported_metadata = {
-        "has_mean": False,
+        "has_mean": True,
         "has_sum": True,
         "name": "Total imported energy",
         "source": "recorder",
@@ -830,6 +1199,156 @@ async def test_statistic_during_period_hole(
     }


+@pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC))
+@pytest.mark.usefixtures("recorder_mock")
+async def test_statistic_during_period_hole_circular_mean(
+    hass: HomeAssistant, hass_ws_client: WebSocketGenerator
+) -> None:
+    """Test statistic_during_period when there are holes in the data."""
+    now = dt_util.utcnow()
+
+    await async_recorder_block_till_done(hass)
+    client = await hass_ws_client()
+
+    zero = now
+    start = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=-18)
+
+    imported_stats: list[StatisticData] = [
+        {
+            "start": (start + timedelta(hours=3 * i)),
+            "mean": (123.456 * i) % 360,
+            "mean_weight": 1,
+        }
+        for i in range(6)
+    ]
+
+    imported_metadata: StatisticMetaData = {
+        "mean_type": StatisticMeanType.CIRCULAR,
+        "has_sum": False,
+        "name": "Wind direction",
+        "source": "recorder",
+        "statistic_id": "sensor.test",
+        "unit_of_measurement": DEGREE,
+    }
+
+    recorder.get_instance(hass).async_import_statistics(
+        imported_metadata,
+        imported_stats,
+        Statistics,
+    )
+    await async_wait_recording_done(hass)
+
+    # This should include imported_stats[:]
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "statistic_id": "sensor.test",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats[:]),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+    # This should also include imported_stats[:]
+    start_time = "2022-10-20T13:00:00+00:00"
+    end_time = "2022-10-21T05:00:00+00:00"
+    assert imported_stats[0]["start"].isoformat() == start_time
+    assert imported_stats[-1]["start"].isoformat() < end_time
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "statistic_id": "sensor.test",
+            "fixed_period": {
+                "start_time": start_time,
+                "end_time": end_time,
+            },
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats[:]),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+    # This should also include imported_stats[:]
+    start_time = "2022-10-20T13:00:00+00:00"
+    end_time = "2022-10-21T08:20:00+00:00"
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "statistic_id": "sensor.test",
+            "fixed_period": {
+                "start_time": start_time,
+                "end_time": end_time,
+            },
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats[:]),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+    # This should include imported_stats[1:4]
+    start_time = "2022-10-20T16:00:00+00:00"
+    end_time = "2022-10-20T23:00:00+00:00"
+    assert imported_stats[1]["start"].isoformat() == start_time
+    assert imported_stats[3]["start"].isoformat() < end_time
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "statistic_id": "sensor.test",
+            "fixed_period": {
+                "start_time": start_time,
+                "end_time": end_time,
+            },
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats[1:4]),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+    # This should also include imported_stats[1:4]
+    start_time = "2022-10-20T15:00:00+00:00"
+    end_time = "2022-10-21T00:00:00+00:00"
+    assert imported_stats[1]["start"].isoformat() > start_time
+    assert imported_stats[3]["start"].isoformat() < end_time
+    await client.send_json_auto_id(
+        {
+            "type": "recorder/statistic_during_period",
+            "statistic_id": "sensor.test",
+            "fixed_period": {
+                "start_time": start_time,
+                "end_time": end_time,
+            },
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "mean": _circular_mean_approx(imported_stats[1:4]),
+        "max": None,
+        "min": None,
+        "change": None,
+    }
+
+
 @pytest.mark.parametrize(
     "frozen_time",
     [
@@ -897,7 +1416,7 @@ async def test_statistic_during_period_partial_overlap(

     statId = "sensor.test_overlapping"
     imported_metadata = {
-        "has_mean": False,
+        "has_mean": True,
         "has_sum": True,
         "name": "Total imported energy overlapping",
         "source": "recorder",
@@ -1766,6 +2285,7 @@ async def test_list_statistic_ids(
     """Test list_statistic_ids."""
    now = get_start_time(dt_util.utcnow())
     has_mean = attributes["state_class"] == "measurement"
+    mean_type = StatisticMeanType.ARITHMETIC if has_mean else StatisticMeanType.NONE
     has_sum = not has_mean

     hass.config.units = units
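These parametrized tests only cover arithmetic and no-mean sensors, hence the two-way expression above. Per the commit message, the new MEASUREMENT_ANGLE state class maps to the circular variant; the full mapping the tests imply (the serialized value "measurement_angle" is an assumption):

def expected_mean_type(state_class: str) -> StatisticMeanType:
    """Mean type implied by a sensor state class (sketch)."""
    if state_class == "measurement":
        return StatisticMeanType.ARITHMETIC
    if state_class == "measurement_angle":  # assumed serialized form
        return StatisticMeanType.CIRCULAR
    return StatisticMeanType.NONE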
@@ -1791,6 +2311,7 @@ async def test_list_statistic_ids(
             "statistic_id": "sensor.test",
             "display_unit_of_measurement": display_unit,
             "has_mean": has_mean,
+            "mean_type": mean_type,
             "has_sum": has_sum,
             "name": None,
             "source": "recorder",
@@ -1813,6 +2334,7 @@ async def test_list_statistic_ids(
             "statistic_id": "sensor.test",
             "display_unit_of_measurement": display_unit,
             "has_mean": has_mean,
+            "mean_type": mean_type,
             "has_sum": has_sum,
             "name": None,
             "source": "recorder",
@@ -1838,6 +2360,7 @@ async def test_list_statistic_ids(
             "statistic_id": "sensor.test",
             "display_unit_of_measurement": display_unit,
             "has_mean": has_mean,
+            "mean_type": mean_type,
             "has_sum": has_sum,
             "name": None,
             "source": "recorder",
@@ -1859,6 +2382,7 @@ async def test_list_statistic_ids(
             "statistic_id": "sensor.test",
             "display_unit_of_measurement": display_unit,
             "has_mean": has_mean,
+            "mean_type": mean_type,
             "has_sum": has_sum,
             "name": None,
             "source": "recorder",
@@ -1939,6 +2463,7 @@ async def test_list_statistic_ids_unit_change(
     """Test list_statistic_ids."""
     now = get_start_time(dt_util.utcnow())
     has_mean = attributes["state_class"] == "measurement"
+    mean_type = StatisticMeanType.ARITHMETIC if has_mean else StatisticMeanType.NONE
     has_sum = not has_mean

     await async_setup_component(hass, "sensor", {})
@@ -1966,6 +2491,7 @@ async def test_list_statistic_ids_unit_change(
             "statistic_id": "sensor.test",
             "display_unit_of_measurement": statistics_unit,
             "has_mean": has_mean,
+            "mean_type": mean_type,
             "has_sum": has_sum,
             "name": None,
             "source": "recorder",
@@ -1987,6 +2513,7 @@ async def test_list_statistic_ids_unit_change(
             "statistic_id": "sensor.test",
             "display_unit_of_measurement": display_unit,
             "has_mean": has_mean,
+            "mean_type": mean_type,
             "has_sum": has_sum,
             "name": None,
             "source": "recorder",
@@ -2208,6 +2735,7 @@ async def test_update_statistics_metadata(
             "statistic_id": "sensor.test",
             "display_unit_of_measurement": "kW",
             "has_mean": True,
+            "mean_type": StatisticMeanType.ARITHMETIC,
             "has_sum": False,
             "name": None,
             "source": "recorder",
@@ -2235,6 +2763,7 @@ async def test_update_statistics_metadata(
             "statistic_id": "sensor.test",
             "display_unit_of_measurement": new_display_unit,
             "has_mean": True,
+            "mean_type": StatisticMeanType.ARITHMETIC,
             "has_sum": False,
             "name": None,
             "source": "recorder",
@@ -2324,6 +2853,7 @@ async def test_change_statistics_unit(
             "statistic_id": "sensor.test",
             "display_unit_of_measurement": "kW",
             "has_mean": True,
+            "mean_type": StatisticMeanType.ARITHMETIC,
             "has_sum": False,
             "name": None,
             "source": "recorder",
@@ -2375,6 +2905,7 @@ async def test_change_statistics_unit(
             "statistic_id": "sensor.test",
             "display_unit_of_measurement": "kW",
             "has_mean": True,
+            "mean_type": StatisticMeanType.ARITHMETIC,
             "has_sum": False,
             "name": None,
             "source": "recorder",
@@ -2428,6 +2959,7 @@ async def test_change_statistics_unit(
             "statistic_id": "sensor.test",
             "display_unit_of_measurement": "kW",
             "has_mean": True,
+            "mean_type": StatisticMeanType.ARITHMETIC,
             "has_sum": False,
             "name": None,
             "source": "recorder",
@@ -2455,6 +2987,7 @@ async def test_change_statistics_unit_errors(
             "statistic_id": "sensor.test",
             "display_unit_of_measurement": "kW",
             "has_mean": True,
+            "mean_type": StatisticMeanType.ARITHMETIC,
             "has_sum": False,
             "name": None,
             "source": "recorder",
@@ -2774,6 +3307,7 @@ async def test_get_statistics_metadata(
     """Test get_statistics_metadata."""
     now = get_start_time(dt_util.utcnow())
     has_mean = attributes["state_class"] == "measurement"
+    mean_type = StatisticMeanType.ARITHMETIC if has_mean else StatisticMeanType.NONE
     has_sum = not has_mean

     hass.config.units = units
@@ -2843,6 +3377,7 @@ async def test_get_statistics_metadata(
             "statistic_id": "test:total_gas",
             "display_unit_of_measurement": unit,
             "has_mean": has_mean,
+            "mean_type": mean_type,
             "has_sum": has_sum,
             "name": "Total imported energy",
             "source": "test",
@@ -2874,6 +3409,7 @@ async def test_get_statistics_metadata(
             "statistic_id": "sensor.test",
             "display_unit_of_measurement": attributes["unit_of_measurement"],
             "has_mean": has_mean,
+            "mean_type": mean_type,
             "has_sum": has_sum,
             "name": None,
             "source": "recorder",
@@ -2901,6 +3437,7 @@ async def test_get_statistics_metadata(
             "statistic_id": "sensor.test",
             "display_unit_of_measurement": attributes["unit_of_measurement"],
             "has_mean": has_mean,
+            "mean_type": mean_type,
             "has_sum": has_sum,
             "name": None,
             "source": "recorder",
@@ -2995,6 +3532,7 @@ async def test_import_statistics(
         {
             "display_unit_of_measurement": "kWh",
             "has_mean": False,
+            "mean_type": StatisticMeanType.NONE,
             "has_sum": True,
             "statistic_id": statistic_id,
             "name": "Total imported energy",
@@ -3009,6 +3547,7 @@ async def test_import_statistics(
         1,
         {
             "has_mean": False,
+            "mean_type": StatisticMeanType.NONE,
             "has_sum": True,
             "name": "Total imported energy",
             "source": source,
@@ -3213,6 +3752,7 @@ async def test_adjust_sum_statistics_energy(
         {
             "display_unit_of_measurement": "kWh",
             "has_mean": False,
+            "mean_type": StatisticMeanType.NONE,
             "has_sum": True,
             "statistic_id": statistic_id,
             "name": "Total imported energy",
@@ -3227,6 +3767,7 @@ async def test_adjust_sum_statistics_energy(
         1,
         {
             "has_mean": False,
+            "mean_type": StatisticMeanType.NONE,
             "has_sum": True,
             "name": "Total imported energy",
             "source": source,
@@ -3406,6 +3947,7 @@ async def test_adjust_sum_statistics_gas(
         {
             "display_unit_of_measurement": "m³",
             "has_mean": False,
+            "mean_type": StatisticMeanType.NONE,
             "has_sum": True,
             "statistic_id": statistic_id,
             "name": "Total imported energy",
@@ -3420,6 +3962,7 @@ async def test_adjust_sum_statistics_gas(
         1,
         {
             "has_mean": False,
+            "mean_type": StatisticMeanType.NONE,
             "has_sum": True,
             "name": "Total imported energy",
             "source": source,
@@ -3617,6 +4160,7 @@ async def test_adjust_sum_statistics_errors(
         {
             "display_unit_of_measurement": state_unit,
             "has_mean": False,
+            "mean_type": StatisticMeanType.NONE,
             "has_sum": True,
             "statistic_id": statistic_id,
             "name": "Total imported energy",
@@ -3631,6 +4175,7 @@ async def test_adjust_sum_statistics_errors(
         1,
         {
             "has_mean": False,
+            "mean_type": StatisticMeanType.NONE,
             "has_sum": True,
             "name": "Total imported energy",
             "source": source,
|
File diff suppressed because it is too large
Load Diff
Loading…
x
Reference in New Issue
Block a user