Add type hints to integration tests (recorder) (#88313)

Repository: https://github.com/home-assistant/core.git
Commit: 185cd61cbd (parent: 0a80ac19bc)
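The diff below applies one recurring pattern across the recorder test modules: pytest fixture parameters gain explicit annotations (recorder_mock: Recorder, hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture), the async_setup_recorder_instance fixture switches from the removed SetupRecorderInstanceT alias to RecorderInstanceGenerator from tests.typing, and every test gets an explicit -> None return type. A minimal sketch of the before/after shape, with illustrative test bodies (only the fixture names and imports are taken from the diff; the bodies here are placeholders):

# Sketch of the annotation pattern applied throughout this commit.
from collections.abc import Callable

import pytest

from homeassistant.components.recorder import Recorder
from homeassistant.core import HomeAssistant


# Before: fixture parameters were untyped and the return type was implicit.
# async def test_async_pre_backup(recorder_mock, hass):
#     ...

# After: fixtures carry their real types and the return type is explicit.
async def test_async_pre_backup(recorder_mock: Recorder, hass: HomeAssistant) -> None:
    """Illustrative body; the real assertions live in the diff below."""


def test_get_last_state_changes(hass_recorder: Callable[..., HomeAssistant]) -> None:
    """hass_recorder is a factory fixture that returns a HomeAssistant instance."""
    hass = hass_recorder()
    assert hass is not None


def test_logging(caplog: pytest.LogCaptureFixture) -> None:
    """Built-in pytest fixtures use the types pytest ships."""
    assert "Error saving events" not in caplog.text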
@@ -3,7 +3,7 @@ from __future__ import annotations

import asyncio
from collections import OrderedDict
-from collections.abc import Awaitable, Callable, Generator, Mapping, Sequence
+from collections.abc import Generator, Mapping, Sequence
from contextlib import contextmanager
from datetime import datetime, timedelta, timezone
import functools as ft
@@ -977,9 +977,6 @@ def assert_setup_component(count, domain=None):
), f"setup_component failed, expected {count} got {res_len}: {res}"


-SetupRecorderInstanceT = Callable[..., Awaitable[Any]]
-
-
def init_recorder_component(hass, add_config=None, db_url="sqlite://"):
"""Initialize the recorder."""
# Local import to avoid processing recorder and SQLite modules when running a
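The SetupRecorderInstanceT = Callable[..., Awaitable[Any]] alias removed from tests/common.py above is replaced throughout the test modules below by RecorderInstanceGenerator imported from tests.typing. Its definition is not part of this diff; presumably it is the same callable shape with the awaited value narrowed from Any to Recorder, roughly:

# Hypothetical sketch of the replacement alias (not shown in this diff).
from collections.abc import Awaitable, Callable

from homeassistant.components.recorder import Recorder

RecorderInstanceGenerator = Callable[..., Awaitable[Recorder]]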
@@ -1,16 +1,15 @@
"""Test backup platform for the Recorder integration."""

from unittest.mock import patch

import pytest

+from homeassistant.components.recorder import Recorder
from homeassistant.components.recorder.backup import async_post_backup, async_pre_backup
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError


-async def test_async_pre_backup(recorder_mock, hass: HomeAssistant) -> None:
+async def test_async_pre_backup(recorder_mock: Recorder, hass: HomeAssistant) -> None:
"""Test pre backup."""
with patch(
"homeassistant.components.recorder.core.Recorder.lock_database"
@@ -20,7 +19,7 @@ async def test_async_pre_backup(recorder_mock, hass: HomeAssistant) -> None:


async def test_async_pre_backup_with_timeout(
-recorder_mock, hass: HomeAssistant
+recorder_mock: Recorder, hass: HomeAssistant
) -> None:
"""Test pre backup with timeout."""
with patch(
@@ -32,7 +31,7 @@ async def test_async_pre_backup_with_timeout(


async def test_async_pre_backup_with_migration(
-recorder_mock, hass: HomeAssistant
+recorder_mock: Recorder, hass: HomeAssistant
) -> None:
"""Test pre backup with migration."""
with patch(
@@ -42,7 +41,7 @@ async def test_async_pre_backup_with_migration(
await async_pre_backup(hass)


-async def test_async_post_backup(recorder_mock, hass: HomeAssistant) -> None:
+async def test_async_post_backup(recorder_mock: Recorder, hass: HomeAssistant) -> None:
"""Test post backup."""
with patch(
"homeassistant.components.recorder.core.Recorder.unlock_database"
@@ -51,7 +50,9 @@ async def test_async_post_backup(recorder_mock, hass: HomeAssistant) -> None:
assert unlock_mock.called


-async def test_async_post_backup_failure(recorder_mock, hass: HomeAssistant) -> None:
+async def test_async_post_backup_failure(
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test post backup failure."""
with patch(
"homeassistant.components.recorder.core.Recorder.unlock_database",
@@ -4,7 +4,7 @@ import json
from sqlalchemy import select
from sqlalchemy.engine.row import Row

-from homeassistant.components.recorder import get_instance
+from homeassistant.components.recorder import Recorder, get_instance
from homeassistant.components.recorder.db_schema import EventData, Events, States
from homeassistant.components.recorder.filters import (
Filters,
@@ -71,7 +71,9 @@ async def _async_get_states_and_events_with_filter(
return filtered_states_entity_ids, filtered_events_entity_ids


-async def test_included_and_excluded_simple_case_no_domains(recorder_mock, hass):
+async def test_included_and_excluded_simple_case_no_domains(
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test filters with included and excluded without domains."""
filter_accept = {"sensor.kitchen4", "switch.kitchen"}
filter_reject = {
@@ -127,7 +129,9 @@ async def test_included_and_excluded_simple_case_no_domains(recorder_mock, hass)
assert not filtered_events_entity_ids.intersection(filter_reject)


-async def test_included_and_excluded_simple_case_no_globs(recorder_mock, hass):
+async def test_included_and_excluded_simple_case_no_globs(
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test filters with included and excluded without globs."""
filter_accept = {"switch.bla", "sensor.blu", "sensor.keep"}
filter_reject = {"sensor.bli"}
@@ -168,8 +172,8 @@ async def test_included_and_excluded_simple_case_no_globs(recorder_mock, hass):


async def test_included_and_excluded_simple_case_without_underscores(
-recorder_mock, hass
-):
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test filters with included and excluded without underscores."""
filter_accept = {"light.any", "sensor.kitchen4", "switch.kitchen"}
filter_reject = {"switch.other", "cover.any", "sensor.weather5", "light.kitchen"}
@@ -221,7 +225,9 @@ async def test_included_and_excluded_simple_case_without_underscores(
assert not filtered_events_entity_ids.intersection(filter_reject)


-async def test_included_and_excluded_simple_case_with_underscores(recorder_mock, hass):
+async def test_included_and_excluded_simple_case_with_underscores(
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test filters with included and excluded with underscores."""
filter_accept = {"light.any", "sensor.kitchen_4", "switch.kitchen"}
filter_reject = {"switch.other", "cover.any", "sensor.weather_5", "light.kitchen"}
@@ -273,7 +279,9 @@ async def test_included_and_excluded_simple_case_with_underscores(recorder_mock,
assert not filtered_events_entity_ids.intersection(filter_reject)


-async def test_included_and_excluded_complex_case(recorder_mock, hass):
+async def test_included_and_excluded_complex_case(
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test filters with included and excluded with a complex filter."""
filter_accept = {"light.any", "sensor.kitchen_4", "switch.kitchen"}
filter_reject = {
@@ -330,7 +338,9 @@ async def test_included_and_excluded_complex_case(recorder_mock, hass):
assert not filtered_events_entity_ids.intersection(filter_reject)


-async def test_included_entities_and_excluded_domain(recorder_mock, hass):
+async def test_included_entities_and_excluded_domain(
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test filters with included entities and excluded domain."""
filter_accept = {
"media_player.test",
@@ -376,7 +386,9 @@ async def test_included_entities_and_excluded_domain(recorder_mock, hass):
assert not filtered_events_entity_ids.intersection(filter_reject)


-async def test_same_domain_included_excluded(recorder_mock, hass):
+async def test_same_domain_included_excluded(
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test filters with the same domain included and excluded."""
filter_accept = {
"media_player.test",
@@ -422,7 +434,9 @@ async def test_same_domain_included_excluded(recorder_mock, hass):
assert not filtered_events_entity_ids.intersection(filter_reject)


-async def test_same_entity_included_excluded(recorder_mock, hass):
+async def test_same_entity_included_excluded(
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test filters with the same entity included and excluded."""
filter_accept = {
"media_player.test",
@@ -468,7 +482,9 @@ async def test_same_entity_included_excluded(recorder_mock, hass):
assert not filtered_events_entity_ids.intersection(filter_reject)


-async def test_same_entity_included_excluded_include_domain_wins(recorder_mock, hass):
+async def test_same_entity_included_excluded_include_domain_wins(
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test filters with domain and entities and the include domain wins."""
filter_accept = {
"media_player.test2",
@@ -516,7 +532,9 @@ async def test_same_entity_included_excluded_include_domain_wins(recorder_mock,
assert not filtered_events_entity_ids.intersection(filter_reject)


-async def test_specificly_included_entity_always_wins(recorder_mock, hass):
+async def test_specificly_included_entity_always_wins(
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test specificlly included entity always wins."""
filter_accept = {
"media_player.test2",
@@ -564,7 +582,9 @@ async def test_specificly_included_entity_always_wins(recorder_mock, hass):
assert not filtered_events_entity_ids.intersection(filter_reject)


-async def test_specificly_included_entity_always_wins_over_glob(recorder_mock, hass):
+async def test_specificly_included_entity_always_wins_over_glob(
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test specificlly included entity always wins over a glob."""
filter_accept = {
"sensor.apc900va_status",
@@ -1,6 +1,8 @@
"""The tests the History component."""
from __future__ import annotations

+from collections.abc import Callable

# pylint: disable=invalid-name
from copy import copy
from datetime import datetime, timedelta
@@ -11,7 +13,7 @@ import pytest
from sqlalchemy import text

from homeassistant.components import recorder
-from homeassistant.components.recorder import get_instance, history
+from homeassistant.components.recorder import Recorder, get_instance, history
from homeassistant.components.recorder.db_schema import (
Events,
RecorderRuns,
@@ -39,7 +41,8 @@ from .common import (
wait_recording_done,
)

-from tests.common import SetupRecorderInstanceT, mock_state_change_event
+from tests.common import mock_state_change_event
+from tests.typing import RecorderInstanceGenerator


async def _async_get_states(
@@ -148,7 +151,9 @@ def _setup_get_states(hass):
return now, future, states


-def test_get_full_significant_states_with_session_entity_no_matches(hass_recorder):
+def test_get_full_significant_states_with_session_entity_no_matches(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test getting states at a specific point in time for entities that never have been recorded."""
hass = hass_recorder()
now = dt_util.utcnow()
@@ -173,8 +178,8 @@ def test_get_full_significant_states_with_session_entity_no_matches(hass_recorde


def test_significant_states_with_session_entity_minimal_response_no_matches(
-hass_recorder,
-):
+hass_recorder: Callable[..., HomeAssistant],
+) -> None:
"""Test getting states at a specific point in time for entities that never have been recorded."""
hass = hass_recorder()
now = dt_util.utcnow()
@@ -213,7 +218,9 @@ def test_significant_states_with_session_entity_minimal_response_no_matches(
({}, True, 3),
],
)
-def test_state_changes_during_period(hass_recorder, attributes, no_attributes, limit):
+def test_state_changes_during_period(
+hass_recorder: Callable[..., HomeAssistant], attributes, no_attributes, limit
+) -> None:
"""Test state change during period."""
hass = hass_recorder()
entity_id = "media_player.test"
@@ -257,7 +264,9 @@ def test_state_changes_during_period(hass_recorder, attributes, no_attributes, l
assert_multiple_states_equal_without_context(states[:limit], hist[entity_id])


-def test_state_changes_during_period_descending(hass_recorder):
+def test_state_changes_during_period_descending(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test state change during period descending."""
hass = hass_recorder()
entity_id = "media_player.test"
@@ -317,7 +326,7 @@ def test_state_changes_during_period_descending(hass_recorder):
)


-def test_get_last_state_changes(hass_recorder):
+def test_get_last_state_changes(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test number of state changes."""
hass = hass_recorder()
entity_id = "sensor.test"
@@ -353,7 +362,9 @@ def test_get_last_state_changes(hass_recorder):
assert_multiple_states_equal_without_context(states, hist[entity_id])


-def test_ensure_state_can_be_copied(hass_recorder):
+def test_ensure_state_can_be_copied(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Ensure a state can pass though copy().

The filter integration uses copy() on states
@@ -387,7 +398,7 @@ def test_ensure_state_can_be_copied(hass_recorder):
assert_states_equal_without_context(copy(hist[entity_id][1]), hist[entity_id][1])


-def test_get_significant_states(hass_recorder):
+def test_get_significant_states(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test that only significant states are returned.

We should get back every thermostat change that
@@ -400,7 +411,9 @@ def test_get_significant_states(hass_recorder):
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)


-def test_get_significant_states_minimal_response(hass_recorder):
+def test_get_significant_states_minimal_response(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test that only significant states are returned.

When minimal responses is set only the first and
@@ -465,7 +478,9 @@ def test_get_significant_states_minimal_response(hass_recorder):


@pytest.mark.parametrize("time_zone", ["Europe/Berlin", "US/Hawaii", "UTC"])
-def test_get_significant_states_with_initial(time_zone, hass_recorder):
+def test_get_significant_states_with_initial(
+time_zone, hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test that only significant states are returned.

We should get back every thermostat change that
@@ -493,7 +508,9 @@ def test_get_significant_states_with_initial(time_zone, hass_recorder):
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)


-def test_get_significant_states_without_initial(hass_recorder):
+def test_get_significant_states_without_initial(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test that only significant states are returned.

We should get back every thermostat change that
@@ -524,7 +541,9 @@ def test_get_significant_states_without_initial(hass_recorder):
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)


-def test_get_significant_states_entity_id(hass_recorder):
+def test_get_significant_states_entity_id(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test that only significant states are returned for one entity."""
hass = hass_recorder()
zero, four, states = record_states(hass)
@@ -538,7 +557,9 @@ def test_get_significant_states_entity_id(hass_recorder):
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)


-def test_get_significant_states_multiple_entity_ids(hass_recorder):
+def test_get_significant_states_multiple_entity_ids(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test that only significant states are returned for one entity."""
hass = hass_recorder()
zero, four, states = record_states(hass)
@@ -558,7 +579,9 @@ def test_get_significant_states_multiple_entity_ids(hass_recorder):
)


-def test_get_significant_states_are_ordered(hass_recorder):
+def test_get_significant_states_are_ordered(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test order of results from get_significant_states.

When entity ids are given, the results should be returned with the data
@@ -574,7 +597,9 @@ def test_get_significant_states_are_ordered(hass_recorder):
assert list(hist.keys()) == entity_ids


-def test_get_significant_states_only(hass_recorder):
+def test_get_significant_states_only(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test significant states when significant_states_only is set."""
hass = hass_recorder()
entity_id = "sensor.test"
@@ -638,7 +663,9 @@ def test_get_significant_states_only(hass_recorder):
)


-async def test_get_significant_states_only_minimal_response(recorder_mock, hass):
+async def test_get_significant_states_only_minimal_response(
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test significant states when significant_states_only is True."""
now = dt_util.utcnow()
await async_recorder_block_till_done(hass)
@@ -745,10 +772,10 @@ def record_states(hass) -> tuple[datetime, datetime, dict[str, list[State]]]:


async def test_state_changes_during_period_query_during_migration_to_schema_25(
-async_setup_recorder_instance: SetupRecorderInstanceT,
-hass: ha.HomeAssistant,
+async_setup_recorder_instance: RecorderInstanceGenerator,
+hass: HomeAssistant,
recorder_db_url: str,
-):
+) -> None:
"""Test we can query data prior to schema 25 and during migration to schema 25."""
if recorder_db_url.startswith(("mysql://", "postgresql://")):
# This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes
@@ -800,10 +827,10 @@ async def test_state_changes_during_period_query_during_migration_to_schema_25(


async def test_get_states_query_during_migration_to_schema_25(
-async_setup_recorder_instance: SetupRecorderInstanceT,
-hass: ha.HomeAssistant,
+async_setup_recorder_instance: RecorderInstanceGenerator,
+hass: HomeAssistant,
recorder_db_url: str,
-):
+) -> None:
"""Test we can query data prior to schema 25 and during migration to schema 25."""
if recorder_db_url.startswith(("mysql://", "postgresql://")):
# This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes
@@ -851,10 +878,10 @@ async def test_get_states_query_during_migration_to_schema_25(


async def test_get_states_query_during_migration_to_schema_25_multiple_entities(
-async_setup_recorder_instance: SetupRecorderInstanceT,
-hass: ha.HomeAssistant,
+async_setup_recorder_instance: RecorderInstanceGenerator,
+hass: HomeAssistant,
recorder_db_url: str,
-):
+) -> None:
"""Test we can query data prior to schema 25 and during migration to schema 25."""
if recorder_db_url.startswith(("mysql://", "postgresql://")):
# This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes
@@ -905,9 +932,9 @@ async def test_get_states_query_during_migration_to_schema_25_multiple_entities(


async def test_get_full_significant_states_handles_empty_last_changed(
-async_setup_recorder_instance: SetupRecorderInstanceT,
-hass: ha.HomeAssistant,
-):
+async_setup_recorder_instance: RecorderInstanceGenerator,
+hass: HomeAssistant,
+) -> None:
"""Test getting states when last_changed is null."""
await async_setup_recorder_instance(hass, {})

@@ -995,7 +1022,9 @@ async def test_get_full_significant_states_handles_empty_last_changed(
)


-def test_state_changes_during_period_multiple_entities_single_test(hass_recorder):
+def test_state_changes_during_period_multiple_entities_single_test(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test state change during period with multiple entities in the same test.

This test ensures the sqlalchemy query cache does not
@@ -1026,9 +1055,9 @@ def test_state_changes_during_period_multiple_entities_single_test(hass_recorder

@pytest.mark.freeze_time("2039-01-19 03:14:07.555555-00:00")
async def test_get_full_significant_states_past_year_2038(
-async_setup_recorder_instance: SetupRecorderInstanceT,
-hass: ha.HomeAssistant,
-):
+async_setup_recorder_instance: RecorderInstanceGenerator,
+hass: HomeAssistant,
+) -> None:
"""Test we can store times past year 2038."""
await async_setup_recorder_instance(hass, {})
past_2038_time = dt_util.parse_datetime("2039-01-19 03:14:07.555555-00:00")
@@ -1,6 +1,8 @@
"""The tests the History component."""
from __future__ import annotations

+from collections.abc import Callable

# pylint: disable=invalid-name
from copy import copy
from datetime import datetime, timedelta
@@ -17,7 +19,7 @@ from homeassistant.components import recorder
from homeassistant.components.recorder import core, history, statistics
from homeassistant.components.recorder.models import process_timestamp
from homeassistant.components.recorder.util import session_scope
-from homeassistant.core import State
+from homeassistant.core import HomeAssistant, State
from homeassistant.helpers.json import JSONEncoder
import homeassistant.util.dt as dt_util

@@ -75,7 +77,9 @@ def db_schema_30():
yield


-def test_get_full_significant_states_with_session_entity_no_matches(hass_recorder):
+def test_get_full_significant_states_with_session_entity_no_matches(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test getting states at a specific point in time for entities that never have been recorded."""
hass = hass_recorder()
now = dt_util.utcnow()
@@ -100,8 +104,8 @@ def test_get_full_significant_states_with_session_entity_no_matches(hass_recorde


def test_significant_states_with_session_entity_minimal_response_no_matches(
-hass_recorder,
-):
+hass_recorder: Callable[..., HomeAssistant],
+) -> None:
"""Test getting states at a specific point in time for entities that never have been recorded."""
hass = hass_recorder()
now = dt_util.utcnow()
@@ -140,7 +144,9 @@ def test_significant_states_with_session_entity_minimal_response_no_matches(
({}, True, 3),
],
)
-def test_state_changes_during_period(hass_recorder, attributes, no_attributes, limit):
+def test_state_changes_during_period(
+hass_recorder: Callable[..., HomeAssistant], attributes, no_attributes, limit
+) -> None:
"""Test state change during period."""
hass = hass_recorder()
entity_id = "media_player.test"
@@ -184,7 +190,9 @@ def test_state_changes_during_period(hass_recorder, attributes, no_attributes, l
assert_multiple_states_equal_without_context(states[:limit], hist[entity_id])


-def test_state_changes_during_period_descending(hass_recorder):
+def test_state_changes_during_period_descending(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test state change during period descending."""
hass = hass_recorder()
entity_id = "media_player.test"
@@ -244,7 +252,7 @@ def test_state_changes_during_period_descending(hass_recorder):
)


-def test_get_last_state_changes(hass_recorder):
+def test_get_last_state_changes(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test number of state changes."""
hass = hass_recorder()
entity_id = "sensor.test"
@@ -280,7 +288,9 @@ def test_get_last_state_changes(hass_recorder):
assert_multiple_states_equal_without_context(states, hist[entity_id])


-def test_ensure_state_can_be_copied(hass_recorder):
+def test_ensure_state_can_be_copied(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Ensure a state can pass though copy().

The filter integration uses copy() on states
@@ -314,7 +324,7 @@ def test_ensure_state_can_be_copied(hass_recorder):
assert_states_equal_without_context(copy(hist[entity_id][1]), hist[entity_id][1])


-def test_get_significant_states(hass_recorder):
+def test_get_significant_states(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test that only significant states are returned.

We should get back every thermostat change that
@@ -327,7 +337,9 @@ def test_get_significant_states(hass_recorder):
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)


-def test_get_significant_states_minimal_response(hass_recorder):
+def test_get_significant_states_minimal_response(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test that only significant states are returned.

When minimal responses is set only the first and
@@ -390,7 +402,9 @@ def test_get_significant_states_minimal_response(hass_recorder):
)


-def test_get_significant_states_with_initial(hass_recorder):
+def test_get_significant_states_with_initial(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test that only significant states are returned.

We should get back every thermostat change that
@@ -419,7 +433,9 @@ def test_get_significant_states_with_initial(hass_recorder):
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)


-def test_get_significant_states_without_initial(hass_recorder):
+def test_get_significant_states_without_initial(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test that only significant states are returned.

We should get back every thermostat change that
@@ -450,7 +466,9 @@ def test_get_significant_states_without_initial(hass_recorder):
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)


-def test_get_significant_states_entity_id(hass_recorder):
+def test_get_significant_states_entity_id(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test that only significant states are returned for one entity."""
hass = hass_recorder()
zero, four, states = record_states(hass)
@@ -464,7 +482,9 @@ def test_get_significant_states_entity_id(hass_recorder):
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)


-def test_get_significant_states_multiple_entity_ids(hass_recorder):
+def test_get_significant_states_multiple_entity_ids(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test that only significant states are returned for one entity."""
hass = hass_recorder()
zero, four, states = record_states(hass)
@@ -487,7 +507,9 @@ def test_get_significant_states_multiple_entity_ids(hass_recorder):
)


-def test_get_significant_states_are_ordered(hass_recorder):
+def test_get_significant_states_are_ordered(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test order of results from get_significant_states.

When entity ids are given, the results should be returned with the data
@@ -503,7 +525,9 @@ def test_get_significant_states_are_ordered(hass_recorder):
assert list(hist.keys()) == entity_ids


-def test_get_significant_states_only(hass_recorder):
+def test_get_significant_states_only(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test significant states when significant_states_only is set."""
hass = hass_recorder()
entity_id = "sensor.test"
@@ -652,7 +676,9 @@ def record_states(hass) -> tuple[datetime, datetime, dict[str, list[State]]]:
return zero, four, states


-def test_state_changes_during_period_multiple_entities_single_test(hass_recorder):
+def test_state_changes_during_period_multiple_entities_single_test(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test state change during period with multiple entities in the same test.

This test ensures the sqlalchemy query cache does not
@@ -2,7 +2,9 @@
from __future__ import annotations

import asyncio
+from collections.abc import Callable
from datetime import datetime, timedelta
+from pathlib import Path
import sqlite3
import threading
from typing import cast
@@ -72,11 +74,11 @@ from .common import (
)

from tests.common import (
-SetupRecorderInstanceT,
async_fire_time_changed,
fire_time_changed,
get_test_home_assistant,
)
+from tests.typing import RecorderInstanceGenerator


def _default_recorder(hass):
@@ -97,11 +99,11 @@ def _default_recorder(hass):


async def test_shutdown_before_startup_finishes(
-async_setup_recorder_instance: SetupRecorderInstanceT,
+async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
recorder_db_url: str,
-tmp_path,
-):
+tmp_path: Path,
+) -> None:
"""Test shutdown before recorder starts is clean."""
if recorder_db_url == "sqlite://":
# On-disk database because this test does not play nice with the
@@ -137,10 +139,10 @@ async def test_shutdown_before_startup_finishes(


async def test_canceled_before_startup_finishes(
-async_setup_recorder_instance: SetupRecorderInstanceT,
+async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
-):
+) -> None:
"""Test recorder shuts down when its startup future is canceled out from under it."""
hass.state = CoreState.not_running
recorder_helper.async_initialize_recorder(hass)
@@ -161,7 +163,9 @@ async def test_canceled_before_startup_finishes(
await hass.async_add_executor_job(instance._shutdown)


-async def test_shutdown_closes_connections(recorder_mock, hass):
+async def test_shutdown_closes_connections(
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test shutdown closes connections."""

hass.state = CoreState.not_running
@@ -187,8 +191,8 @@ async def test_shutdown_closes_connections(recorder_mock, hass):


async def test_state_gets_saved_when_set_before_start_event(
-async_setup_recorder_instance: SetupRecorderInstanceT, hass: HomeAssistant
-):
+async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
+) -> None:
"""Test we can record an event when starting with not running."""

hass.state = CoreState.not_running
@@ -213,7 +217,7 @@ async def test_state_gets_saved_when_set_before_start_event(
assert db_states[0].event_id is None


-async def test_saving_state(recorder_mock, hass: HomeAssistant):
+async def test_saving_state(recorder_mock: Recorder, hass: HomeAssistant) -> None:
"""Test saving and restoring a state."""
entity_id = "test.recorder"
state = "restoring_from_db"
@@ -248,8 +252,8 @@ async def test_saving_state(recorder_mock, hass: HomeAssistant):
),
)
async def test_saving_state_with_nul(
-recorder_mock, hass: HomeAssistant, dialect_name, expected_attributes
-):
+recorder_mock: Recorder, hass: HomeAssistant, dialect_name, expected_attributes
+) -> None:
"""Test saving and restoring a state with nul in attributes."""
entity_id = "test.recorder"
state = "restoring_from_db"
@@ -280,8 +284,8 @@ async def test_saving_state_with_nul(


async def test_saving_many_states(
-async_setup_recorder_instance: SetupRecorderInstanceT, hass: HomeAssistant
-):
+async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
+) -> None:
"""Test we expire after many commits."""
instance = await async_setup_recorder_instance(
hass, {recorder.CONF_COMMIT_INTERVAL: 0}
@@ -308,8 +312,8 @@ async def test_saving_many_states(


async def test_saving_state_with_intermixed_time_changes(
-recorder_mock, hass: HomeAssistant
-):
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test saving states with intermixed time changes."""
entity_id = "test.recorder"
state = "restoring_from_db"
@@ -331,7 +335,11 @@ async def test_saving_state_with_intermixed_time_changes(
assert db_states[0].event_id is None


-def test_saving_state_with_exception(hass_recorder, hass, caplog):
+def test_saving_state_with_exception(
+hass_recorder: Callable[..., HomeAssistant],
+hass: HomeAssistant,
+caplog: pytest.LogCaptureFixture,
+) -> None:
"""Test saving and restoring a state."""
hass = hass_recorder()

@@ -369,7 +377,11 @@ def test_saving_state_with_exception(hass_recorder, hass, caplog):
assert "Error saving events" not in caplog.text


-def test_saving_state_with_sqlalchemy_exception(hass_recorder, hass, caplog):
+def test_saving_state_with_sqlalchemy_exception(
+hass_recorder: Callable[..., HomeAssistant],
+hass: HomeAssistant,
+caplog: pytest.LogCaptureFixture,
+) -> None:
"""Test saving state when there is an SQLAlchemyError."""
hass = hass_recorder()

@@ -408,8 +420,10 @@ def test_saving_state_with_sqlalchemy_exception(hass_recorder, hass, caplog):


async def test_force_shutdown_with_queue_of_writes_that_generate_exceptions(
-async_setup_recorder_instance, hass, caplog
-):
+async_setup_recorder_instance: RecorderInstanceGenerator,
+hass: HomeAssistant,
+caplog: pytest.LogCaptureFixture,
+) -> None:
"""Test forcing shutdown."""
instance = await async_setup_recorder_instance(hass)

@@ -437,7 +451,7 @@ async def test_force_shutdown_with_queue_of_writes_that_generate_exceptions(
assert "Error saving events" not in caplog.text


-def test_saving_event(hass_recorder):
+def test_saving_event(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test saving and restoring an event."""
hass = hass_recorder()

@@ -489,7 +503,9 @@ def test_saving_event(hass_recorder):
)


-def test_saving_state_with_commit_interval_zero(hass_recorder):
+def test_saving_state_with_commit_interval_zero(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test saving a state with a commit interval of zero."""
hass = hass_recorder({"commit_interval": 0})
get_instance(hass).commit_interval == 0
@@ -554,14 +570,16 @@ def _state_with_context(hass, entity_id):
return hass.states.get(entity_id)


-def test_setup_without_migration(hass_recorder):
+def test_setup_without_migration(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Verify the schema version without a migration."""
hass = hass_recorder()
assert recorder.get_instance(hass).schema_version == SCHEMA_VERSION


# pylint: disable=invalid-name
-def test_saving_state_include_domains(hass_recorder):
+def test_saving_state_include_domains(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test saving and restoring a state."""
hass = hass_recorder({"include": {"domains": "test2"}})
states = _add_entities(hass, ["test.recorder", "test2.recorder"])
@@ -569,7 +587,9 @@ def test_saving_state_include_domains(hass_recorder):
assert _state_with_context(hass, "test2.recorder").as_dict() == states[0].as_dict()


-def test_saving_state_include_domains_globs(hass_recorder):
+def test_saving_state_include_domains_globs(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test saving and restoring a state."""
hass = hass_recorder(
{"include": {"domains": "test2", "entity_globs": "*.included_*"}}
@@ -590,7 +610,9 @@ def test_saving_state_include_domains_globs(hass_recorder):
)


-def test_saving_state_incl_entities(hass_recorder):
+def test_saving_state_incl_entities(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test saving and restoring a state."""
hass = hass_recorder({"include": {"entities": "test2.recorder"}})
states = _add_entities(hass, ["test.recorder", "test2.recorder"])
@@ -598,7 +620,9 @@ def test_saving_state_incl_entities(hass_recorder):
assert _state_with_context(hass, "test2.recorder").as_dict() == states[0].as_dict()


-def test_saving_event_exclude_event_type(hass_recorder):
+def test_saving_event_exclude_event_type(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test saving and restoring an event."""
hass = hass_recorder(
{
@@ -619,7 +643,9 @@ def test_saving_event_exclude_event_type(hass_recorder):
assert events[0].event_type == "test2"


-def test_saving_state_exclude_domains(hass_recorder):
+def test_saving_state_exclude_domains(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test saving and restoring a state."""
hass = hass_recorder({"exclude": {"domains": "test"}})
states = _add_entities(hass, ["test.recorder", "test2.recorder"])
@@ -627,7 +653,9 @@ def test_saving_state_exclude_domains(hass_recorder):
assert _state_with_context(hass, "test2.recorder").as_dict() == states[0].as_dict()


-def test_saving_state_exclude_domains_globs(hass_recorder):
+def test_saving_state_exclude_domains_globs(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test saving and restoring a state."""
hass = hass_recorder(
{"exclude": {"domains": "test", "entity_globs": "*.excluded_*"}}
@@ -639,7 +667,9 @@ def test_saving_state_exclude_domains_globs(hass_recorder):
assert _state_with_context(hass, "test2.recorder").as_dict() == states[0].as_dict()


-def test_saving_state_exclude_entities(hass_recorder):
+def test_saving_state_exclude_entities(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test saving and restoring a state."""
hass = hass_recorder({"exclude": {"entities": "test.recorder"}})
states = _add_entities(hass, ["test.recorder", "test2.recorder"])
@@ -647,7 +677,9 @@ def test_saving_state_exclude_entities(hass_recorder):
assert _state_with_context(hass, "test2.recorder").as_dict() == states[0].as_dict()


-def test_saving_state_exclude_domain_include_entity(hass_recorder):
+def test_saving_state_exclude_domain_include_entity(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test saving and restoring a state."""
hass = hass_recorder(
{"include": {"entities": "test.recorder"}, "exclude": {"domains": "test"}}
@@ -656,7 +688,9 @@ def test_saving_state_exclude_domain_include_entity(hass_recorder):
assert len(states) == 2


-def test_saving_state_exclude_domain_glob_include_entity(hass_recorder):
+def test_saving_state_exclude_domain_glob_include_entity(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test saving and restoring a state."""
hass = hass_recorder(
{
@@ -670,7 +704,9 @@ def test_saving_state_exclude_domain_glob_include_entity(hass_recorder):
assert len(states) == 3


-def test_saving_state_include_domain_exclude_entity(hass_recorder):
+def test_saving_state_include_domain_exclude_entity(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test saving and restoring a state."""
hass = hass_recorder(
{"exclude": {"entities": "test.recorder"}, "include": {"domains": "test"}}
@@ -681,7 +717,9 @@ def test_saving_state_include_domain_exclude_entity(hass_recorder):
assert _state_with_context(hass, "test.ok").state == "state2"


-def test_saving_state_include_domain_glob_exclude_entity(hass_recorder):
+def test_saving_state_include_domain_glob_exclude_entity(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test saving and restoring a state."""
hass = hass_recorder(
{
@@ -697,7 +735,9 @@ def test_saving_state_include_domain_glob_exclude_entity(hass_recorder):
assert _state_with_context(hass, "test.ok").state == "state2"


-def test_saving_state_and_removing_entity(hass_recorder):
+def test_saving_state_and_removing_entity(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test saving the state of a removed entity."""
hass = hass_recorder()
entity_id = "lock.mine"
@@ -718,7 +758,9 @@ def test_saving_state_and_removing_entity(hass_recorder):
assert states[2].state is None


-def test_saving_state_with_oversized_attributes(hass_recorder, caplog):
+def test_saving_state_with_oversized_attributes(
+hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
+) -> None:
"""Test saving states is limited to 16KiB of JSON encoded attributes."""
hass = hass_recorder()
massive_dict = {"a": "b" * 16384}
@@ -819,7 +861,7 @@ def run_tasks_at_time(hass, test_time):


@pytest.mark.parametrize("enable_nightly_purge", [True])
-def test_auto_purge(hass_recorder):
+def test_auto_purge(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test periodic purge scheduling."""
hass = hass_recorder()

@@ -877,7 +919,9 @@ def test_auto_purge(hass_recorder):


@pytest.mark.parametrize("enable_nightly_purge", [True])
-def test_auto_purge_auto_repack_on_second_sunday(hass_recorder):
+def test_auto_purge_auto_repack_on_second_sunday(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test periodic purge scheduling does a repack on the 2nd sunday."""
hass = hass_recorder()

@@ -915,7 +959,9 @@ def test_auto_purge_auto_repack_on_second_sunday(hass_recorder):


@pytest.mark.parametrize("enable_nightly_purge", [True])
-def test_auto_purge_auto_repack_disabled_on_second_sunday(hass_recorder):
+def test_auto_purge_auto_repack_disabled_on_second_sunday(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test periodic purge scheduling does not auto repack on the 2nd sunday if disabled."""
hass = hass_recorder({CONF_AUTO_REPACK: False})

@@ -953,7 +999,9 @@ def test_auto_purge_auto_repack_disabled_on_second_sunday(hass_recorder):


@pytest.mark.parametrize("enable_nightly_purge", [True])
-def test_auto_purge_no_auto_repack_on_not_second_sunday(hass_recorder):
+def test_auto_purge_no_auto_repack_on_not_second_sunday(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test periodic purge scheduling does not do a repack unless its the 2nd sunday."""
hass = hass_recorder()

@@ -992,7 +1040,7 @@ def test_auto_purge_no_auto_repack_on_not_second_sunday(hass_recorder):


@pytest.mark.parametrize("enable_nightly_purge", [True])
-def test_auto_purge_disabled(hass_recorder):
+def test_auto_purge_disabled(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test periodic db cleanup still run when auto purge is disabled."""
hass = hass_recorder({CONF_AUTO_PURGE: False})

@@ -1028,7 +1076,7 @@ def test_auto_purge_disabled(hass_recorder):


@pytest.mark.parametrize("enable_statistics", [True])
-def test_auto_statistics(hass_recorder, freezer):
+def test_auto_statistics(hass_recorder: Callable[..., HomeAssistant], freezer) -> None:
"""Test periodic statistics scheduling."""
hass = hass_recorder()

@@ -1117,7 +1165,7 @@ def test_auto_statistics(hass_recorder, freezer):
dt_util.set_default_time_zone(original_tz)


-def test_statistics_runs_initiated(hass_recorder):
+def test_statistics_runs_initiated(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test statistics_runs is initiated when DB is created."""
now = dt_util.utcnow()
with patch(
@@ -1137,7 +1185,7 @@ def test_statistics_runs_initiated(hass_recorder):


@pytest.mark.freeze_time("2022-09-13 09:00:00+02:00")
-def test_compile_missing_statistics(tmpdir, freezer):
+def test_compile_missing_statistics(tmpdir, freezer) -> None:
"""Test missing statistics are compiled on startup."""
now = dt_util.utcnow().replace(minute=0, second=0, microsecond=0)
test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
@@ -1202,7 +1250,7 @@ def test_compile_missing_statistics(tmpdir, freezer):
hass.stop()


-def test_saving_sets_old_state(hass_recorder):
+def test_saving_sets_old_state(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test saving sets old state."""
hass = hass_recorder()

@@ -1228,7 +1276,9 @@ def test_saving_sets_old_state(hass_recorder):
assert states[3].old_state_id == states[1].state_id


-def test_saving_state_with_serializable_data(hass_recorder, caplog):
+def test_saving_state_with_serializable_data(
+hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
+) -> None:
"""Test saving data that cannot be serialized does not crash."""
hass = hass_recorder()

@@ -1252,7 +1302,7 @@ def test_saving_state_with_serializable_data(hass_recorder, caplog):
assert "State is not JSON serializable" in caplog.text


-def test_has_services(hass_recorder):
+def test_has_services(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test the services exist."""
hass = hass_recorder()

@@ -1262,7 +1312,9 @@ def test_has_services(hass_recorder):
assert hass.services.has_service(DOMAIN, SERVICE_PURGE_ENTITIES)


-def test_service_disable_events_not_recording(hass_recorder):
+def test_service_disable_events_not_recording(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test that events are not recorded when recorder is disabled using service."""
hass = hass_recorder()

@@ -1337,7 +1389,9 @@ def test_service_disable_events_not_recording(hass_recorder):
)


-def test_service_disable_states_not_recording(hass_recorder):
+def test_service_disable_states_not_recording(
+hass_recorder: Callable[..., HomeAssistant]
+) -> None:
"""Test that state changes are not recorded when recorder is disabled using service."""
hass = hass_recorder()

@@ -1374,7 +1428,7 @@ def test_service_disable_states_not_recording(hass_recorder):
)


-def test_service_disable_run_information_recorded(tmpdir):
+def test_service_disable_run_information_recorded(tmpdir) -> None:
"""Test that runs are still recorded when recorder is disabled."""
test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
@@ -1422,7 +1476,9 @@ class CannotSerializeMe:
"""A class that the JSONEncoder cannot serialize."""


-async def test_database_corruption_while_running(hass, tmpdir, caplog):
+async def test_database_corruption_while_running(
+hass: HomeAssistant, tmpdir, caplog: pytest.LogCaptureFixture
+) -> None:
"""Test we can recover from sqlite3 db corruption."""

def _create_tmpdir_for_test_db():
@@ -1493,7 +1549,7 @@ async def test_database_corruption_while_running(hass, tmpdir, caplog):
hass.stop()


-def test_entity_id_filter(hass_recorder):
+def test_entity_id_filter(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test that entity ID filtering filters string and list."""
hass = hass_recorder(
{"include": {"domains": "hello"}, "exclude": {"domains": "hidden_domain"}}
@@ -1529,11 +1585,11 @@ def test_entity_id_filter(hass_recorder):


async def test_database_lock_and_unlock(
-async_setup_recorder_instance: SetupRecorderInstanceT,
+async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
recorder_db_url: str,
-tmp_path,
-):
+tmp_path: Path,
+) -> None:
"""Test writing events during lock getting written after unlocking."""
if recorder_db_url.startswith(("mysql://", "postgresql://")):
# Database locking is only used for SQLite
@@ -1578,11 +1634,11 @@ async def test_database_lock_and_unlock(


async def test_database_lock_and_overflow(
-async_setup_recorder_instance: SetupRecorderInstanceT,
+async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
recorder_db_url: str,
-tmp_path,
-):
+tmp_path: Path,
+) -> None:
"""Test writing events during lock leading to overflow the queue causes the database to unlock."""
if recorder_db_url.startswith(("mysql://", "postgresql://")):
# Database locking is only used for SQLite
@@ -1624,7 +1680,9 @@ async def test_database_lock_and_overflow(
assert not instance.unlock_database()


-async def test_database_lock_timeout(recorder_mock, hass, recorder_db_url):
+async def test_database_lock_timeout(
+recorder_mock: Recorder, hass: HomeAssistant, recorder_db_url: str
+) -> None:
"""Test locking database timeout when recorder stopped."""
if recorder_db_url.startswith(("mysql://", "postgresql://")):
# This test is specific for SQLite: Locking is not implemented for other engines
@@ -1651,7 +1709,9 @@ async def test_database_lock_timeout(recorder_mock, hass, recorder_db_url):
block_task.event.set()


-async def test_database_lock_without_instance(recorder_mock, hass):
+async def test_database_lock_without_instance(
+recorder_mock: Recorder, hass: HomeAssistant
+) -> None:
"""Test database lock doesn't fail if instance is not initialized."""
hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)

@@ -1674,10 +1734,10 @@ async def test_in_memory_database(


async def test_database_connection_keep_alive(
-async_setup_recorder_instance: SetupRecorderInstanceT,
+async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
-):
+) -> None:
"""Test we keep alive socket based dialects."""
with patch("homeassistant.components.recorder.Recorder.dialect_name"):
instance = await async_setup_recorder_instance(hass)
@@ -1694,11 +1754,11 @@ async def test_database_connection_keep_alive(


async def test_database_connection_keep_alive_disabled_on_sqlite(
-async_setup_recorder_instance: SetupRecorderInstanceT,
+async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
recorder_db_url: str,
-):
+) -> None:
"""Test we do not do keep alive for sqlite."""
if recorder_db_url.startswith(("mysql://", "postgresql://")):
# This test is specific for SQLite, keepalive runs on other engines
@@ -1715,7 +1775,9 @@ async def test_database_connection_keep_alive_disabled_on_sqlite(
assert "Sending keepalive" not in caplog.text


-def test_deduplication_event_data_inside_commit_interval(hass_recorder, caplog):
+def test_deduplication_event_data_inside_commit_interval(
+hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
+) -> None:
"""Test deduplication of event data inside the commit interval."""
hass = hass_recorder()

@@ -1740,7 +1802,9 @@ def test_deduplication_event_data_inside_commit_interval(hass_recorder, caplog):
# Patch STATE_ATTRIBUTES_ID_CACHE_SIZE since otherwise
# the CI can fail because the test takes too long to run
@patch("homeassistant.components.recorder.core.STATE_ATTRIBUTES_ID_CACHE_SIZE", 5)
-def test_deduplication_state_attributes_inside_commit_interval(hass_recorder, caplog):
+def test_deduplication_state_attributes_inside_commit_interval(
+hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
+) -> None:
"""Test deduplication of state attributes inside the commit interval."""
hass = hass_recorder()

@@ -1775,7 +1839,9 @@ def test_deduplication_state_attributes_inside_commit_interval(hass_recorder, ca
assert first_attributes_id == last_attributes_id


-async def test_async_block_till_done(async_setup_recorder_instance, hass):
+async def test_async_block_till_done(
+async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
+) -> None:
"""Test we can block until recordering is done."""
instance = await async_setup_recorder_instance(hass)
await async_wait_recording_done(hass)
@@ -1808,7 +1874,9 @@ async def test_async_block_till_done(async_setup_recorder_instance, hass):
("postgresql://blabla", False),
),
)
-async def test_disable_echo(hass, db_url, echo, caplog):
+async def test_disable_echo(
+hass: HomeAssistant, db_url, echo, caplog: pytest.LogCaptureFixture
+) -> None:
"""Test echo is disabled for non sqlite databases."""
recorder_helper.async_initialize_recorder(hass)

@@ -1864,7 +1932,9 @@ async def test_disable_echo(hass, db_url, echo, caplog):
),
),
)
-async def test_mysql_missing_utf8mb4(hass, config_url, expected_connect_args):
+async def test_mysql_missing_utf8mb4(
+hass: HomeAssistant, config_url, expected_connect_args
+) -> None:
"""Test recorder fails to setup if charset=utf8mb4 is missing from db_url."""
recorder_helper.async_initialize_recorder(hass)

@@ -1894,7 +1964,7 @@ async def test_mysql_missing_utf8mb4(hass, config_url, expected_connect_args):
"mysql://user:password@SERVER_IP/DB_NAME?blah=bleh&charset=other",
),
)
-async def test_connect_args_priority(hass, config_url):
+async def test_connect_args_priority(hass: HomeAssistant, config_url) -> None:
"""Test connect_args has priority over URL query."""
connect_params = []
recorder_helper.async_initialize_recorder(hass)

@ -291,7 +291,7 @@ async def test_events_during_migration_queue_exhausted(
)
async def test_schema_migrate(
recorder_db_url: str, hass: HomeAssistant, start_version, live
):
) -> None:
"""Test the full schema migration logic.

We're just testing that the logic can execute successfully here without
@ -409,7 +409,7 @@ def test_invalid_update(hass: HomeAssistant) -> None:
("sqlite", None),
],
)
def test_modify_column(engine_type, substr):
def test_modify_column(engine_type, substr) -> None:
"""Test that modify column generates the expected query."""
connection = Mock()
session = Mock()
@ -457,7 +457,9 @@ def test_forgiving_add_index(recorder_db_url: str) -> None:
@pytest.mark.parametrize(
"exception_type", [OperationalError, ProgrammingError, InternalError]
)
def test_forgiving_add_index_with_other_db_types(caplog, exception_type):
def test_forgiving_add_index_with_other_db_types(
caplog: pytest.LogCaptureFixture, exception_type
) -> None:
"""Test that add index will continue if index exists on mysql and postgres."""
mocked_index = Mock()
type(mocked_index).name = "ix_states_context_id"

@ -24,6 +24,7 @@ from homeassistant.components.recorder.models import (
)
from homeassistant.const import EVENT_STATE_CHANGED
import homeassistant.core as ha
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import InvalidEntityFormatError
from homeassistant.util import dt, dt as dt_util

@ -120,7 +121,9 @@ def test_events_repr_without_timestamp() -> None:
assert "2016-07-09 11:00:00+00:00" in repr(events)


def test_handling_broken_json_state_attributes(caplog):
def test_handling_broken_json_state_attributes(
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test we handle broken json in state attributes."""
state_attributes = StateAttributes(
attributes_id=444, hash=1234, shared_attrs="{NOT_PARSE}"
@ -312,7 +315,9 @@ async def test_event_to_db_model() -> None:
assert native.as_dict() == event.as_dict()


async def test_lazy_state_handles_include_json(caplog):
async def test_lazy_state_handles_include_json(
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test that the LazyState class handles invalid json."""
row = PropertyMock(
entity_id="sensor.invalid",
@ -322,7 +327,9 @@ async def test_lazy_state_handles_include_json(caplog):
assert "Error converting row to state attributes" in caplog.text


async def test_lazy_state_prefers_shared_attrs_over_attrs(caplog):
async def test_lazy_state_prefers_shared_attrs_over_attrs(
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test that the LazyState prefers shared_attrs over attributes."""
row = PropertyMock(
entity_id="sensor.invalid",
@ -332,7 +339,9 @@ async def test_lazy_state_prefers_shared_attrs_over_attrs(caplog):
assert LazyState(row, {}, None).attributes == {"shared": True}


async def test_lazy_state_handles_different_last_updated_and_last_changed(caplog):
async def test_lazy_state_handles_different_last_updated_and_last_changed(
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test that the LazyState handles different last_updated and last_changed."""
now = datetime(2021, 6, 12, 3, 4, 1, 323, tzinfo=dt_util.UTC)
row = PropertyMock(
@ -361,7 +370,9 @@ async def test_lazy_state_handles_different_last_updated_and_last_changed(caplog
}


async def test_lazy_state_handles_same_last_updated_and_last_changed(caplog):
async def test_lazy_state_handles_same_last_updated_and_last_changed(
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test that the LazyState handles same last_updated and last_changed."""
now = datetime(2021, 6, 12, 3, 4, 1, 323, tzinfo=dt_util.UTC)
row = PropertyMock(
@ -409,7 +420,7 @@ async def test_lazy_state_handles_same_last_updated_and_last_changed(caplog):
@pytest.mark.parametrize(
"time_zone", ["Europe/Berlin", "America/Chicago", "US/Hawaii", "UTC"]
)
def test_process_datetime_to_timestamp(time_zone, hass):
def test_process_datetime_to_timestamp(time_zone, hass: HomeAssistant) -> None:
"""Test we can handle processing database datatimes to timestamps."""
hass.config.set_time_zone(time_zone)
utc_now = dt_util.utcnow()
@ -421,7 +432,9 @@ def test_process_datetime_to_timestamp(time_zone, hass):
@pytest.mark.parametrize(
"time_zone", ["Europe/Berlin", "America/Chicago", "US/Hawaii", "UTC"]
)
def test_process_datetime_to_timestamp_freeze_time(time_zone, hass):
def test_process_datetime_to_timestamp_freeze_time(
time_zone, hass: HomeAssistant
) -> None:
"""Test we can handle processing database datatimes to timestamps.

This test freezes time to make sure everything matches.
@ -439,8 +452,8 @@ def test_process_datetime_to_timestamp_freeze_time(time_zone, hass):
"time_zone", ["Europe/Berlin", "America/Chicago", "US/Hawaii", "UTC"]
)
async def test_process_datetime_to_timestamp_mirrors_utc_isoformat_behavior(
time_zone, hass
):
time_zone, hass: HomeAssistant
) -> None:
"""Test process_datetime_to_timestamp mirrors process_timestamp_to_utc_isoformat."""
hass.config.set_time_zone(time_zone)
datetime_with_tzinfo = datetime(2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC)

@ -16,7 +16,7 @@ async def test_recorder_pool_called_from_event_loop() -> None:
sessionmaker(bind=engine)().connection()


def test_recorder_pool(caplog):
def test_recorder_pool(caplog: pytest.LogCaptureFixture) -> None:
"""Test RecorderPool gives the same connection in the creating thread."""

engine = create_engine("sqlite://", poolclass=RecorderPool)

@ -37,7 +37,7 @@ from .common import (
async_wait_recording_done,
)

from tests.common import SetupRecorderInstanceT
from tests.typing import RecorderInstanceGenerator


@pytest.fixture(name="use_sqlite")
@ -53,8 +53,8 @@ def mock_use_sqlite(request):


async def test_purge_old_states(
async_setup_recorder_instance: SetupRecorderInstanceT, hass: HomeAssistant
):
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
) -> None:
"""Test deleting old states."""
instance = await async_setup_recorder_instance(hass)

@ -141,10 +141,10 @@ async def test_purge_old_states(


async def test_purge_old_states_encouters_database_corruption(
async_setup_recorder_instance: SetupRecorderInstanceT,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
recorder_db_url: str,
):
) -> None:
"""Test database image image is malformed while deleting old states."""
if recorder_db_url.startswith(("mysql://", "postgresql://")):
# This test is specific for SQLite, wiping the database on error only happens
@ -178,10 +178,10 @@ async def test_purge_old_states_encouters_database_corruption(


async def test_purge_old_states_encounters_temporary_mysql_error(
async_setup_recorder_instance: SetupRecorderInstanceT,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
caplog,
):
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test retry on specific mysql operational errors."""
instance = await async_setup_recorder_instance(hass)

@ -209,10 +209,10 @@ async def test_purge_old_states_encounters_temporary_mysql_error(


async def test_purge_old_states_encounters_operational_error(
async_setup_recorder_instance: SetupRecorderInstanceT,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
caplog,
):
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test error on operational errors that are not mysql does not retry."""
await async_setup_recorder_instance(hass)

@ -235,8 +235,8 @@ async def test_purge_old_states_encounters_operational_error(


async def test_purge_old_events(
async_setup_recorder_instance: SetupRecorderInstanceT, hass: HomeAssistant
):
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
) -> None:
"""Test deleting old events."""
instance = await async_setup_recorder_instance(hass)

@ -272,8 +272,8 @@ async def test_purge_old_events(


async def test_purge_old_recorder_runs(
async_setup_recorder_instance: SetupRecorderInstanceT, hass: HomeAssistant
):
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
) -> None:
"""Test deleting old recorder runs keeps current run."""
instance = await async_setup_recorder_instance(hass)

@ -308,8 +308,8 @@ async def test_purge_old_recorder_runs(


async def test_purge_old_statistics_runs(
async_setup_recorder_instance: SetupRecorderInstanceT, hass: HomeAssistant
):
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
) -> None:
"""Test deleting old statistics runs keeps the latest run."""
instance = await async_setup_recorder_instance(hass)

@ -333,11 +333,11 @@ async def test_purge_old_statistics_runs(

@pytest.mark.parametrize("use_sqlite", (True, False), indirect=True)
async def test_purge_method(
async_setup_recorder_instance: SetupRecorderInstanceT,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
use_sqlite: bool,
):
) -> None:
"""Test purge method."""

def assert_recorder_runs_equal(run1, run2):
@ -452,10 +452,10 @@ async def test_purge_method(

@pytest.mark.parametrize("use_sqlite", (True, False), indirect=True)
async def test_purge_edge_case(
async_setup_recorder_instance: SetupRecorderInstanceT,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
use_sqlite: bool,
):
) -> None:
"""Test states and events are purged even if they occurred shortly before purge_before."""

async def _add_db_entries(hass: HomeAssistant, timestamp: datetime) -> None:
@ -519,9 +519,9 @@ async def test_purge_edge_case(


async def test_purge_cutoff_date(
async_setup_recorder_instance: SetupRecorderInstanceT,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
):
) -> None:
"""Test states and events are purged only if they occurred before "now() - keep_days"."""

async def _add_db_entries(hass: HomeAssistant, cutoff: datetime, rows: int) -> None:
@ -667,10 +667,10 @@ async def test_purge_cutoff_date(

@pytest.mark.parametrize("use_sqlite", (True, False), indirect=True)
async def test_purge_filtered_states(
async_setup_recorder_instance: SetupRecorderInstanceT,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
use_sqlite: bool,
):
) -> None:
"""Test filtered states are purged."""
config: ConfigType = {"exclude": {"entities": ["sensor.excluded"]}}
instance = await async_setup_recorder_instance(hass, config)
@ -853,10 +853,10 @@ async def test_purge_filtered_states(

@pytest.mark.parametrize("use_sqlite", (True, False), indirect=True)
async def test_purge_filtered_states_to_empty(
async_setup_recorder_instance: SetupRecorderInstanceT,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
use_sqlite: bool,
):
) -> None:
"""Test filtered states are purged all the way to an empty db."""
config: ConfigType = {"exclude": {"entities": ["sensor.excluded"]}}
instance = await async_setup_recorder_instance(hass, config)
@ -906,10 +906,10 @@ async def test_purge_filtered_states_to_empty(

@pytest.mark.parametrize("use_sqlite", (True, False), indirect=True)
async def test_purge_without_state_attributes_filtered_states_to_empty(
async_setup_recorder_instance: SetupRecorderInstanceT,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
use_sqlite: bool,
):
) -> None:
"""Test filtered legacy states without state attributes are purged all the way to an empty db."""
config: ConfigType = {"exclude": {"entities": ["sensor.old_format"]}}
instance = await async_setup_recorder_instance(hass, config)
@ -980,9 +980,9 @@ async def test_purge_without_state_attributes_filtered_states_to_empty(


async def test_purge_filtered_events(
async_setup_recorder_instance: SetupRecorderInstanceT,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
):
) -> None:
"""Test filtered events are purged."""
config: ConfigType = {"exclude": {"event_types": ["EVENT_PURGE"]}}
await async_setup_recorder_instance(hass, config)
@ -1068,9 +1068,9 @@ async def test_purge_filtered_events(


async def test_purge_filtered_events_state_changed(
async_setup_recorder_instance: SetupRecorderInstanceT,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
):
) -> None:
"""Test filtered state_changed events are purged. This should also remove all states."""
config: ConfigType = {"exclude": {"event_types": [EVENT_STATE_CHANGED]}}
instance = await async_setup_recorder_instance(hass, config)
@ -1171,8 +1171,8 @@ async def test_purge_filtered_events_state_changed(


async def test_purge_entities(
async_setup_recorder_instance: SetupRecorderInstanceT, hass: HomeAssistant
):
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
) -> None:
"""Test purging of specific entities."""
await async_setup_recorder_instance(hass)

@ -1543,8 +1543,8 @@ def _add_state_and_state_changed_event(


async def test_purge_many_old_events(
async_setup_recorder_instance: SetupRecorderInstanceT, hass: HomeAssistant
):
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
) -> None:
"""Test deleting old events."""
instance = await async_setup_recorder_instance(hass)

@ -1591,8 +1591,8 @@ async def test_purge_many_old_events(


async def test_purge_can_mix_legacy_and_new_format(
async_setup_recorder_instance: SetupRecorderInstanceT, hass: HomeAssistant
):
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
) -> None:
"""Test purging with legacy a new events."""
instance = await async_setup_recorder_instance(hass)
utcnow = dt_util.utcnow()

@ -1,18 +1,18 @@
"""Test run history."""

from datetime import timedelta
from unittest.mock import patch

from homeassistant.components import recorder
from homeassistant.components.recorder import Recorder
from homeassistant.components.recorder.db_schema import RecorderRuns
from homeassistant.components.recorder.models import process_timestamp
from homeassistant.core import HomeAssistant
from homeassistant.util import dt as dt_util

from tests.common import SetupRecorderInstanceT
from tests.typing import RecorderInstanceGenerator


async def test_run_history(recorder_mock, hass):
async def test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None:
"""Test the run history gives the correct run."""
instance = recorder.get_instance(hass)
now = dt_util.utcnow()
@ -52,7 +52,7 @@ async def test_run_history(recorder_mock, hass):


async def test_run_history_while_recorder_is_not_yet_started(
async_setup_recorder_instance: SetupRecorderInstanceT,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
recorder_db_url: str,
) -> None:

@ -1,4 +1,6 @@
"""The tests for sensor recorder platform."""
from collections.abc import Callable

# pylint: disable=invalid-name
from datetime import datetime, timedelta
import importlib
@ -11,7 +13,7 @@ from sqlalchemy.exc import OperationalError
from sqlalchemy.orm import Session

from homeassistant.components import recorder
from homeassistant.components.recorder import history, statistics
from homeassistant.components.recorder import Recorder, history, statistics
from homeassistant.components.recorder.const import SQLITE_URL_PREFIX
from homeassistant.components.recorder.db_schema import StatisticsShortTerm
from homeassistant.components.recorder.models import (
@ -35,7 +37,7 @@ from homeassistant.components.recorder.statistics import (
from homeassistant.components.recorder.util import session_scope
from homeassistant.components.sensor import UNIT_CONVERTERS
from homeassistant.const import UnitOfTemperature
from homeassistant.core import callback
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import recorder as recorder_helper
from homeassistant.setup import setup_component
@ -50,6 +52,7 @@ from .common import (
)

from tests.common import get_test_home_assistant, mock_registry
from tests.typing import RecorderInstanceGenerator, WebSocketGenerator

ORIG_TZ = dt_util.DEFAULT_TIME_ZONE

@ -63,7 +66,7 @@ def test_converters_align_with_sensor() -> None:
assert converter in UNIT_CONVERTERS.values()


def test_compile_hourly_statistics(hass_recorder):
def test_compile_hourly_statistics(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test compiling hourly statistics."""
hass = hass_recorder()
instance = recorder.get_instance(hass)
@ -266,8 +269,8 @@ def mock_from_stats():


def test_compile_periodic_statistics_exception(
hass_recorder, mock_sensor_statistics, mock_from_stats
):
hass_recorder: Callable[..., HomeAssistant], mock_sensor_statistics, mock_from_stats
) -> None:
"""Test exception handling when compiling periodic statistics."""

hass = hass_recorder()
@ -309,7 +312,7 @@ def test_compile_periodic_statistics_exception(
}


def test_rename_entity(hass_recorder):
def test_rename_entity(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test statistics is migrated when entity_id is changed."""
hass = hass_recorder()
setup_component(hass, "sensor", {})
@ -375,7 +378,9 @@ def test_rename_entity(hass_recorder):
assert stats == {"sensor.test99": expected_stats99, "sensor.test2": expected_stats2}


def test_rename_entity_collision(hass_recorder, caplog):
def test_rename_entity_collision(
hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
) -> None:
"""Test statistics is migrated when entity_id is changed."""
hass = hass_recorder()
setup_component(hass, "sensor", {})
@ -456,7 +461,9 @@ def test_rename_entity_collision(hass_recorder, caplog):
assert "Blocked attempt to insert duplicated statistic rows" in caplog.text


def test_statistics_duplicated(hass_recorder, caplog):
def test_statistics_duplicated(
hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
) -> None:
"""Test statistics with same start time is not compiled."""
hass = hass_recorder()
setup_component(hass, "sensor", {})
@ -498,15 +505,15 @@ def test_statistics_duplicated(hass_recorder, caplog):
),
)
async def test_import_statistics(
recorder_mock,
hass,
hass_ws_client,
caplog,
recorder_mock: Recorder,
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
caplog: pytest.LogCaptureFixture,
source,
statistic_id,
import_fn,
last_reset_str,
):
) -> None:
"""Test importing statistics and inserting external statistics."""
client = await hass_ws_client()

@ -760,7 +767,9 @@ async def test_import_statistics(
}


def test_external_statistics_errors(hass_recorder, caplog):
def test_external_statistics_errors(
hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
) -> None:
"""Test validation of external statistics."""
hass = hass_recorder()
wait_recording_done(hass)
@ -847,7 +856,9 @@ def test_external_statistics_errors(hass_recorder, caplog):
assert get_metadata(hass, statistic_ids=("test:total_energy_import",)) == {}


def test_import_statistics_errors(hass_recorder, caplog):
def test_import_statistics_errors(
hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
) -> None:
"""Test validation of imported statistics."""
hass = hass_recorder()
wait_recording_done(hass)
@ -936,7 +947,11 @@ def test_import_statistics_errors(hass_recorder, caplog):

@pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"])
@pytest.mark.freeze_time("2022-10-01 00:00:00+00:00")
def test_weekly_statistics(hass_recorder, caplog, timezone):
def test_weekly_statistics(
hass_recorder: Callable[..., HomeAssistant],
caplog: pytest.LogCaptureFixture,
timezone,
) -> None:
"""Test weekly statistics."""
dt_util.set_default_time_zone(dt_util.get_time_zone(timezone))

@ -1070,7 +1085,11 @@ def test_weekly_statistics(hass_recorder, caplog, timezone):

@pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"])
@pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
def test_monthly_statistics(hass_recorder, caplog, timezone):
def test_monthly_statistics(
hass_recorder: Callable[..., HomeAssistant],
caplog: pytest.LogCaptureFixture,
timezone,
) -> None:
"""Test monthly statistics."""
dt_util.set_default_time_zone(dt_util.get_time_zone(timezone))

@ -1206,7 +1225,9 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
dt_util.set_default_time_zone(dt_util.get_time_zone("UTC"))


def test_delete_duplicates_no_duplicates(hass_recorder, caplog):
def test_delete_duplicates_no_duplicates(
hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
) -> None:
"""Test removal of duplicated statistics."""
hass = hass_recorder()
wait_recording_done(hass)
@ -1217,7 +1238,9 @@ def test_delete_duplicates_no_duplicates(hass_recorder, caplog):
assert "Found duplicated" not in caplog.text


def test_duplicate_statistics_handle_integrity_error(hass_recorder, caplog):
def test_duplicate_statistics_handle_integrity_error(
hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
) -> None:
"""Test the recorder does not blow up if statistics is duplicated."""
hass = hass_recorder()
wait_recording_done(hass)
@ -1297,7 +1320,7 @@ def _create_engine_28(*args, **kwargs):
return engine


def test_delete_metadata_duplicates(caplog, tmpdir):
def test_delete_metadata_duplicates(caplog: pytest.LogCaptureFixture, tmpdir) -> None:
"""Test removal of duplicated statistics."""
test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
@ -1388,7 +1411,9 @@ def test_delete_metadata_duplicates(caplog, tmpdir):
dt_util.DEFAULT_TIME_ZONE = ORIG_TZ


def test_delete_metadata_duplicates_many(caplog, tmpdir):
def test_delete_metadata_duplicates_many(
caplog: pytest.LogCaptureFixture, tmpdir
) -> None:
"""Test removal of duplicated statistics."""
test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
@ -1483,7 +1508,9 @@ def test_delete_metadata_duplicates_many(caplog, tmpdir):
dt_util.DEFAULT_TIME_ZONE = ORIG_TZ


def test_delete_metadata_duplicates_no_duplicates(hass_recorder, caplog):
def test_delete_metadata_duplicates_no_duplicates(
hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
) -> None:
"""Test removal of duplicated statistics."""
hass = hass_recorder()
wait_recording_done(hass)
@ -1495,8 +1522,11 @@ def test_delete_metadata_duplicates_no_duplicates(hass_recorder, caplog):
@pytest.mark.parametrize("enable_statistics_table_validation", [True])
@pytest.mark.parametrize("db_engine", ("mysql", "postgresql"))
async def test_validate_db_schema(
async_setup_recorder_instance, hass, caplog, db_engine
):
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
db_engine,
) -> None:
"""Test validating DB schema with MySQL and PostgreSQL.

Note: The test uses SQLite, the purpose is only to exercise the code.
@ -1513,8 +1543,10 @@ async def test_validate_db_schema(

@pytest.mark.parametrize("enable_statistics_table_validation", [True])
async def test_validate_db_schema_fix_utf8_issue(
async_setup_recorder_instance, hass, caplog
):
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test validating DB schema with MySQL.

Note: The test uses SQLite, the purpose is only to exercise the code.
@ -1553,15 +1585,15 @@ async def test_validate_db_schema_fix_utf8_issue(
(("max", 1.0), ("mean", 1.0), ("min", 1.0), ("state", 1.0), ("sum", 1.0)),
)
async def test_validate_db_schema_fix_float_issue(
async_setup_recorder_instance,
hass,
caplog,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
db_engine,
table,
replace_index,
column,
value,
):
) -> None:
"""Test validating DB schema with MySQL.

Note: The test uses SQLite, the purpose is only to exercise the code.
@ -1639,16 +1671,16 @@ async def test_validate_db_schema_fix_float_issue(
),
)
async def test_validate_db_schema_fix_statistics_datetime_issue(
async_setup_recorder_instance,
hass,
caplog,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
db_engine,
modification,
table,
replace_index,
column,
value,
):
) -> None:
"""Test validating DB schema with MySQL.

Note: The test uses SQLite, the purpose is only to exercise the code.

@ -52,7 +52,7 @@ def _create_engine_test(*args, **kwargs):
return engine


def test_delete_duplicates(caplog, tmpdir):
def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmpdir) -> None:
"""Test removal of duplicated statistics."""
test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
@ -222,7 +222,7 @@ def test_delete_duplicates(caplog, tmpdir):
assert "Found duplicated" not in caplog.text


def test_delete_duplicates_many(caplog, tmpdir):
def test_delete_duplicates_many(caplog: pytest.LogCaptureFixture, tmpdir) -> None:
"""Test removal of duplicated statistics."""
test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
@ -399,7 +399,9 @@ def test_delete_duplicates_many(caplog, tmpdir):


@pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
def test_delete_duplicates_non_identical(caplog, tmpdir):
def test_delete_duplicates_non_identical(
caplog: pytest.LogCaptureFixture, tmpdir
) -> None:
"""Test removal of duplicated statistics."""
test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
@ -570,7 +572,7 @@ def test_delete_duplicates_non_identical(caplog, tmpdir):
]


def test_delete_duplicates_short_term(caplog, tmpdir):
def test_delete_duplicates_short_term(caplog: pytest.LogCaptureFixture, tmpdir) -> None:
"""Test removal of duplicated statistics."""
test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"

@ -1,20 +1,22 @@
"""Test recorder system health."""

from unittest.mock import ANY, Mock, patch

import pytest

from homeassistant.components.recorder import get_instance
from homeassistant.components.recorder import Recorder, get_instance
from homeassistant.components.recorder.const import SupportedDialect
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component

from .common import async_wait_recording_done

from tests.common import SetupRecorderInstanceT, get_system_health_info
from tests.common import get_system_health_info
from tests.typing import RecorderInstanceGenerator


async def test_recorder_system_health(recorder_mock, hass, recorder_db_url):
async def test_recorder_system_health(
recorder_mock: Recorder, hass: HomeAssistant, recorder_db_url: str
) -> None:
"""Test recorder system health."""
if recorder_db_url.startswith(("mysql://", "postgresql://")):
# This test is specific for SQLite
@ -36,7 +38,9 @@ async def test_recorder_system_health(recorder_mock, hass, recorder_db_url):
@pytest.mark.parametrize(
"dialect_name", [SupportedDialect.MYSQL, SupportedDialect.POSTGRESQL]
)
async def test_recorder_system_health_alternate_dbms(recorder_mock, hass, dialect_name):
async def test_recorder_system_health_alternate_dbms(
recorder_mock: Recorder, hass: HomeAssistant, dialect_name
) -> None:
"""Test recorder system health."""
assert await async_setup_component(hass, "system_health", {})
await async_wait_recording_done(hass)
@ -61,8 +65,8 @@ async def test_recorder_system_health_alternate_dbms(recorder_mock, hass, dialec
"dialect_name", [SupportedDialect.MYSQL, SupportedDialect.POSTGRESQL]
)
async def test_recorder_system_health_db_url_missing_host(
recorder_mock, hass, dialect_name
):
recorder_mock: Recorder, hass: HomeAssistant, dialect_name
) -> None:
"""Test recorder system health with a db_url without a hostname."""
assert await async_setup_component(hass, "system_health", {})
await async_wait_recording_done(hass)
@ -89,10 +93,10 @@ async def test_recorder_system_health_db_url_missing_host(


async def test_recorder_system_health_crashed_recorder_runs_table(
async_setup_recorder_instance: SetupRecorderInstanceT,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
recorder_db_url: str,
):
) -> None:
"""Test recorder system health with crashed recorder runs table."""
if recorder_db_url.startswith(("mysql://", "postgresql://")):
# This test is specific for SQLite

@ -1,6 +1,8 @@
"""Test util methods."""
from collections.abc import Callable
from datetime import datetime, timedelta, timezone
import os
from pathlib import Path
import sqlite3
from unittest.mock import MagicMock, Mock, patch

@ -33,10 +35,11 @@ from homeassistant.util import dt as dt_util

from .common import corrupt_db_file, run_information_with_session, wait_recording_done

from tests.common import SetupRecorderInstanceT, async_test_home_assistant
from tests.common import async_test_home_assistant
from tests.typing import RecorderInstanceGenerator


def test_session_scope_not_setup(hass_recorder):
def test_session_scope_not_setup(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Try to create a session scope when not setup."""
hass = hass_recorder()
with patch.object(
@ -45,7 +48,7 @@ def test_session_scope_not_setup(hass_recorder):
pass


def test_recorder_bad_execute(hass_recorder):
def test_recorder_bad_execute(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Bad execute, retry 3 times."""
from sqlalchemy.exc import SQLAlchemyError

@ -66,7 +69,9 @@ def test_recorder_bad_execute(hass_recorder):
assert e_mock.call_count == 2


def test_validate_or_move_away_sqlite_database(hass, tmpdir, caplog):
def test_validate_or_move_away_sqlite_database(
hass: HomeAssistant, tmpdir, caplog: pytest.LogCaptureFixture
) -> None:
"""Ensure a malformed sqlite database is moved away."""

test_dir = tmpdir.mkdir("test_validate_or_move_away_sqlite_database")
@ -91,8 +96,8 @@ def test_validate_or_move_away_sqlite_database(hass, tmpdir, caplog):


async def test_last_run_was_recently_clean(
event_loop, async_setup_recorder_instance: SetupRecorderInstanceT, tmp_path
):
event_loop, async_setup_recorder_instance: RecorderInstanceGenerator, tmp_path: Path
) -> None:
"""Test we can check if the last recorder run was recently clean."""
config = {
recorder.CONF_DB_URL: "sqlite:///" + str(tmp_path / "pytest.db"),
@ -158,7 +163,7 @@ async def test_last_run_was_recently_clean(
"mysql_version",
["10.3.0-MariaDB", "8.0.0"],
)
def test_setup_connection_for_dialect_mysql(mysql_version):
def test_setup_connection_for_dialect_mysql(mysql_version) -> None:
"""Test setting up the connection for a mysql dialect."""
instance_mock = MagicMock()
execute_args = []
@ -191,7 +196,7 @@ def test_setup_connection_for_dialect_mysql(mysql_version):
"sqlite_version",
["3.31.0"],
)
def test_setup_connection_for_dialect_sqlite(sqlite_version):
def test_setup_connection_for_dialect_sqlite(sqlite_version) -> None:
"""Test setting up the connection for a sqlite dialect."""
instance_mock = MagicMock()
execute_args = []
@ -246,7 +251,7 @@ def test_setup_connection_for_dialect_sqlite(sqlite_version):
)
def test_setup_connection_for_dialect_sqlite_zero_commit_interval(
sqlite_version,
):
) -> None:
"""Test setting up the connection for a sqlite dialect with a zero commit interval."""
instance_mock = MagicMock(commit_interval=0)
execute_args = []
@ -312,7 +317,9 @@ def test_setup_connection_for_dialect_sqlite_zero_commit_interval(
),
],
)
def test_fail_outdated_mysql(caplog, mysql_version, message):
def test_fail_outdated_mysql(
caplog: pytest.LogCaptureFixture, mysql_version, message
) -> None:
"""Test setting up the connection for an outdated mysql version."""
instance_mock = MagicMock()
execute_args = []
@ -348,7 +355,7 @@ def test_fail_outdated_mysql(caplog, mysql_version, message):
("8.0.0"),
],
)
def test_supported_mysql(caplog, mysql_version):
def test_supported_mysql(caplog: pytest.LogCaptureFixture, mysql_version) -> None:
"""Test setting up the connection for a supported mysql version."""
instance_mock = MagicMock()
execute_args = []
@ -391,7 +398,9 @@ def test_supported_mysql(caplog, mysql_version):
),
],
)
def test_fail_outdated_pgsql(caplog, pgsql_version, message):
def test_fail_outdated_pgsql(
caplog: pytest.LogCaptureFixture, pgsql_version, message
) -> None:
"""Test setting up the connection for an outdated PostgreSQL version."""
instance_mock = MagicMock()
execute_args = []
@ -424,7 +433,7 @@ def test_fail_outdated_pgsql(caplog, pgsql_version, message):
"pgsql_version",
["14.0 (Debian 14.0-1.pgdg110+1)"],
)
def test_supported_pgsql(caplog, pgsql_version):
def test_supported_pgsql(caplog: pytest.LogCaptureFixture, pgsql_version) -> None:
"""Test setting up the connection for a supported PostgreSQL version."""
instance_mock = MagicMock()
execute_args = []
@ -471,7 +480,9 @@ def test_supported_pgsql(caplog, pgsql_version):
),
],
)
def test_fail_outdated_sqlite(caplog, sqlite_version, message):
def test_fail_outdated_sqlite(
caplog: pytest.LogCaptureFixture, sqlite_version, message
) -> None:
"""Test setting up the connection for an outdated sqlite version."""
instance_mock = MagicMock()
execute_args = []
@ -507,7 +518,7 @@ def test_fail_outdated_sqlite(caplog, sqlite_version, message):
("3.33.0"),
],
)
def test_supported_sqlite(caplog, sqlite_version):
def test_supported_sqlite(caplog: pytest.LogCaptureFixture, sqlite_version) -> None:
"""Test setting up the connection for a supported sqlite version."""
instance_mock = MagicMock()
execute_args = []
@ -545,7 +556,9 @@ def test_supported_sqlite(caplog, sqlite_version):
("some_db", "Database some_db is not supported"),
],
)
def test_warn_unsupported_dialect(caplog, dialect, message):
def test_warn_unsupported_dialect(
caplog: pytest.LogCaptureFixture, dialect, message
) -> None:
"""Test setting up the connection for an outdated sqlite version."""
instance_mock = MagicMock()
dbapi_connection = MagicMock()
@ -580,8 +593,8 @@ def test_warn_unsupported_dialect(caplog, dialect, message):
],
)
async def test_issue_for_mariadb_with_MDEV_25020(
hass, caplog, mysql_version, min_version
):
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mysql_version, min_version
) -> None:
"""Test we create an issue for MariaDB versions affected.

See https://jira.mariadb.org/browse/MDEV-25020.
@ -634,7 +647,9 @@ async def test_issue_for_mariadb_with_MDEV_25020(
"10.9.1-MariaDB",
],
)
async def test_no_issue_for_mariadb_with_MDEV_25020(hass, caplog, mysql_version):
async def test_no_issue_for_mariadb_with_MDEV_25020(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mysql_version
) -> None:
"""Test we do not create an issue for MariaDB versions not affected.

See https://jira.mariadb.org/browse/MDEV-25020.
@ -676,7 +691,9 @@ async def test_no_issue_for_mariadb_with_MDEV_25020(hass, caplog, mysql_version)
assert database_engine.optimizer.slow_range_in_select is False


def test_basic_sanity_check(hass_recorder, recorder_db_url):
def test_basic_sanity_check(
hass_recorder: Callable[..., HomeAssistant], recorder_db_url
) -> None:
"""Test the basic sanity checks with a missing table."""
if recorder_db_url.startswith(("mysql://", "postgresql://")):
# This test is specific for SQLite
@ -694,7 +711,11 @@ def test_basic_sanity_check(hass_recorder, recorder_db_url):
util.basic_sanity_check(cursor)


def test_combined_checks(hass_recorder, caplog, recorder_db_url):
def test_combined_checks(
hass_recorder: Callable[..., HomeAssistant],
caplog: pytest.LogCaptureFixture,
recorder_db_url,
) -> None:
"""Run Checks on the open database."""
if recorder_db_url.startswith(("mysql://", "postgresql://")):
# This test is specific for SQLite
@ -752,7 +773,9 @@ def test_combined_checks(hass_recorder, caplog, recorder_db_url):
util.run_checks_on_open_db("fake_db_path", cursor)


def test_end_incomplete_runs(hass_recorder, caplog):
def test_end_incomplete_runs(
hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
) -> None:
"""Ensure we can end incomplete runs."""
hass = hass_recorder()

@ -776,7 +799,9 @@ def test_end_incomplete_runs(hass_recorder, caplog):
assert "Ended unfinished session" in caplog.text


def test_periodic_db_cleanups(hass_recorder, recorder_db_url):
def test_periodic_db_cleanups(
hass_recorder: Callable[..., HomeAssistant], recorder_db_url
) -> None:
"""Test periodic db cleanups."""
if recorder_db_url.startswith(("mysql://", "postgresql://")):
# This test is specific for SQLite
@ -796,10 +821,10 @@ def test_periodic_db_cleanups(hass_recorder, recorder_db_url):
@patch("homeassistant.components.recorder.pool.check_loop")
async def test_write_lock_db(
skip_check_loop,
async_setup_recorder_instance: SetupRecorderInstanceT,
async_setup_recorder_instance: RecorderInstanceGenerator,
hass: HomeAssistant,
tmp_path,
):
tmp_path: Path,
) -> None:
"""Test database write lock."""
from sqlalchemy.exc import OperationalError

@ -854,7 +879,9 @@ def test_build_mysqldb_conv() -> None:


@patch("homeassistant.components.recorder.util.QUERY_RETRY_WAIT", 0)
def test_execute_stmt_lambda_element(hass_recorder):
def test_execute_stmt_lambda_element(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test executing with execute_stmt_lambda_element."""
hass = hass_recorder()
instance = recorder.get_instance(hass)

@ -5,6 +5,7 @@ import importlib
import sys
from unittest.mock import patch

import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import Session

@ -48,7 +49,7 @@ def _create_engine_test(*args, **kwargs):
return engine


def test_migrate_times(caplog, tmpdir):
def test_migrate_times(caplog: pytest.LogCaptureFixture, tmpdir) -> None:
"""Test we can migrate times."""
test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
@ -10,6 +10,7 @@ from freezegun import freeze_time
|
||||
import pytest
|
||||
|
||||
from homeassistant.components import recorder
|
||||
from homeassistant.components.recorder import Recorder
|
||||
from homeassistant.components.recorder.db_schema import Statistics, StatisticsShortTerm
|
||||
from homeassistant.components.recorder.statistics import (
|
||||
async_add_external_statistics,
|
||||
@ -137,7 +138,9 @@ def test_converters_align_with_sensor() -> None:
|
||||
assert any(c for c in UNIT_CONVERTERS.values() if unit_class == c.UNIT_CLASS)
|
||||
|
||||
|
||||
async def test_statistics_during_period(recorder_mock, hass, hass_ws_client):
|
||||
async def test_statistics_during_period(
|
||||
recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator
|
||||
) -> None:
|
||||
"""Test statistics_during_period."""
|
||||
now = dt_util.utcnow()
|
||||
|
||||
@ -216,7 +219,12 @@ async def test_statistics_during_period(recorder_mock, hass, hass_ws_client):
|
||||
|
||||
@freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.timezone.utc))
|
||||
@pytest.mark.parametrize("offset", (0, 1, 2))
|
||||
async def test_statistic_during_period(recorder_mock, hass, hass_ws_client, offset):
|
||||
async def test_statistic_during_period(
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
offset,
|
||||
) -> None:
|
||||
"""Test statistic_during_period."""
|
||||
id = 1
|
||||
|
||||
@ -625,7 +633,9 @@ async def test_statistic_during_period(recorder_mock, hass, hass_ws_client, offs
|
||||
|
||||
|
||||
@freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.timezone.utc))
|
||||
async def test_statistic_during_period_hole(recorder_mock, hass, hass_ws_client):
|
||||
async def test_statistic_during_period_hole(
|
||||
recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator
|
||||
) -> None:
|
||||
"""Test statistic_during_period when there are holes in the data."""
|
||||
id = 1
|
||||
|
||||
@ -842,8 +852,13 @@ async def test_statistic_during_period_hole(recorder_mock, hass, hass_ws_client)
|
||||
),
|
||||
)
|
||||
async def test_statistic_during_period_calendar(
|
||||
recorder_mock, hass, hass_ws_client, calendar_period, start_time, end_time
|
||||
):
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
calendar_period,
|
||||
start_time,
|
||||
end_time,
|
||||
) -> None:
|
||||
"""Test statistic_during_period."""
|
||||
client = await hass_ws_client()
|
||||
|
||||
@ -891,15 +906,15 @@ async def test_statistic_during_period_calendar(
|
||||
],
|
||||
)
|
||||
async def test_statistics_during_period_unit_conversion(
|
||||
recorder_mock,
|
||||
hass,
|
||||
hass_ws_client,
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
attributes,
|
||||
state,
|
||||
value,
|
||||
custom_units,
|
||||
converted_value,
|
||||
):
|
||||
) -> None:
|
||||
"""Test statistics_during_period."""
|
||||
now = dt_util.utcnow()
|
||||
|
||||
@ -982,15 +997,15 @@ async def test_statistics_during_period_unit_conversion(
|
||||
],
|
||||
)
|
||||
async def test_sum_statistics_during_period_unit_conversion(
|
||||
recorder_mock,
|
||||
hass,
|
||||
hass_ws_client,
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
attributes,
|
||||
state,
|
||||
value,
|
||||
custom_units,
|
||||
converted_value,
|
||||
):
|
||||
) -> None:
|
||||
"""Test statistics_during_period."""
|
||||
now = dt_util.utcnow()
|
||||
|
||||
@ -1073,8 +1088,11 @@ async def test_sum_statistics_during_period_unit_conversion(
|
||||
],
|
||||
)
|
||||
async def test_statistics_during_period_invalid_unit_conversion(
|
||||
recorder_mock, hass, hass_ws_client, custom_units
|
||||
):
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
custom_units,
|
||||
) -> None:
|
||||
"""Test statistics_during_period."""
|
||||
now = dt_util.utcnow()
|
||||
|
||||
@ -1114,8 +1132,8 @@ async def test_statistics_during_period_invalid_unit_conversion(
|
||||
|
||||
|
||||
async def test_statistics_during_period_in_the_past(
|
||||
recorder_mock, hass, hass_ws_client
|
||||
):
|
||||
recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator
|
||||
) -> None:
|
||||
"""Test statistics_during_period in the past."""
|
||||
hass.config.set_time_zone("UTC")
|
||||
now = dt_util.utcnow().replace()
|
||||
@ -1235,8 +1253,8 @@ async def test_statistics_during_period_in_the_past(
|
||||
|
||||
|
||||
async def test_statistics_during_period_bad_start_time(
|
||||
recorder_mock, hass, hass_ws_client
|
||||
):
|
||||
recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator
|
||||
) -> None:
|
||||
"""Test statistics_during_period."""
|
||||
client = await hass_ws_client()
|
||||
await client.send_json(
|
||||
@ -1254,8 +1272,8 @@ async def test_statistics_during_period_bad_start_time(
|
||||
|
||||
|
||||
async def test_statistics_during_period_bad_end_time(
|
||||
recorder_mock, hass, hass_ws_client
|
||||
):
|
||||
recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator
|
||||
) -> None:
|
||||
"""Test statistics_during_period."""
|
||||
now = dt_util.utcnow()
|
||||
|
||||
@ -1276,8 +1294,8 @@ async def test_statistics_during_period_bad_end_time(
|
||||
|
||||
|
||||
async def test_statistics_during_period_no_statistic_ids(
|
||||
recorder_mock, hass, hass_ws_client
|
||||
):
|
||||
recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator
|
||||
) -> None:
|
||||
"""Test statistics_during_period without passing statistic_ids."""
|
||||
now = dt_util.utcnow()
|
||||
|
||||
@ -1297,8 +1315,8 @@ async def test_statistics_during_period_no_statistic_ids(
|
||||
|
||||
|
||||
async def test_statistics_during_period_empty_statistic_ids(
|
||||
recorder_mock, hass, hass_ws_client
|
||||
):
|
||||
recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator
|
||||
) -> None:
|
||||
"""Test statistics_during_period with passing an empty list of statistic_ids."""
|
||||
now = dt_util.utcnow()
|
||||
|
||||
@ -1376,15 +1394,15 @@ async def test_statistics_during_period_empty_statistic_ids(
|
||||
],
|
||||
)
|
||||
async def test_list_statistic_ids(
|
||||
recorder_mock,
|
||||
hass,
|
||||
hass_ws_client,
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
units,
|
||||
attributes,
|
||||
display_unit,
|
||||
statistics_unit,
|
||||
unit_class,
|
||||
):
|
||||
) -> None:
|
||||
"""Test list_statistic_ids."""
|
||||
now = dt_util.utcnow()
|
||||
has_mean = attributes["state_class"] == "measurement"
|
||||
@ -1540,15 +1558,15 @@ async def test_list_statistic_ids(
|
||||
],
|
||||
)
|
||||
async def test_list_statistic_ids_unit_change(
|
||||
recorder_mock,
|
||||
hass,
|
||||
hass_ws_client,
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
attributes,
|
||||
attributes2,
|
||||
display_unit,
|
||||
statistics_unit,
|
||||
unit_class,
|
||||
):
|
||||
) -> None:
|
||||
"""Test list_statistic_ids."""
|
||||
now = dt_util.utcnow()
|
||||
has_mean = attributes["state_class"] == "measurement"
|
||||
@ -1605,7 +1623,9 @@ async def test_list_statistic_ids_unit_change(
|
||||
]
|
||||
|
||||
|
||||
async def test_validate_statistics(recorder_mock, hass, hass_ws_client):
|
||||
async def test_validate_statistics(
|
||||
recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator
|
||||
) -> None:
|
||||
"""Test validate_statistics can be called."""
|
||||
id = 1
|
||||
|
||||
@ -1627,7 +1647,9 @@ async def test_validate_statistics(recorder_mock, hass, hass_ws_client):
|
||||
await assert_validation_result(client, {})
|
||||
|
||||
|
||||
async def test_clear_statistics(recorder_mock, hass, hass_ws_client):
|
||||
async def test_clear_statistics(
|
||||
recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator
|
||||
) -> None:
|
||||
"""Test removing statistics."""
|
||||
now = dt_util.utcnow()
|
||||
|
||||
@ -1755,8 +1777,13 @@ async def test_clear_statistics(recorder_mock, hass, hass_ws_client):
|
||||
[("dogs", None, "dogs"), (None, "unitless", None), ("W", "power", "kW")],
|
||||
)
|
||||
async def test_update_statistics_metadata(
|
||||
recorder_mock, hass, hass_ws_client, new_unit, new_unit_class, new_display_unit
|
||||
):
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
new_unit,
|
||||
new_unit_class,
|
||||
new_display_unit,
|
||||
) -> None:
|
||||
"""Test removing statistics."""
|
||||
now = dt_util.utcnow()
|
||||
|
||||
@ -1847,7 +1874,9 @@ async def test_update_statistics_metadata(
|
||||
}
|
||||
|
||||
|
||||
async def test_change_statistics_unit(recorder_mock, hass, hass_ws_client):
|
||||
async def test_change_statistics_unit(
|
||||
recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator
|
||||
) -> None:
|
||||
"""Test change unit of recorded statistics."""
|
||||
now = dt_util.utcnow()
|
||||
|
||||
@ -1966,8 +1995,11 @@ async def test_change_statistics_unit(recorder_mock, hass, hass_ws_client):
|
||||
|
||||
|
||||
async def test_change_statistics_unit_errors(
|
||||
recorder_mock, hass, hass_ws_client, caplog
|
||||
):
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test change unit of recorded statistics."""
|
||||
now = dt_util.utcnow()
|
||||
ws_id = 0
|
||||
@ -2083,7 +2115,9 @@ async def test_change_statistics_unit_errors(
|
||||
await assert_statistics(expected_statistics)
|
||||
|
||||
|
||||
async def test_recorder_info(recorder_mock, hass, hass_ws_client):
|
||||
async def test_recorder_info(
|
||||
recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator
|
||||
) -> None:
|
||||
"""Test getting recorder status."""
|
||||
client = await hass_ws_client()
|
||||
|
||||
@ -2206,8 +2240,10 @@ async def test_recorder_info_migration_queue_exhausted(
|
||||
|
||||
|
||||
async def test_backup_start_no_recorder(
|
||||
hass, hass_ws_client, hass_supervisor_access_token
|
||||
):
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
hass_supervisor_access_token: str,
|
||||
) -> None:
|
||||
"""Test getting backup start when recorder is not present."""
|
||||
client = await hass_ws_client(hass, hass_supervisor_access_token)
|
||||
|
||||
@ -2218,8 +2254,12 @@ async def test_backup_start_no_recorder(
|
||||
|
||||
|
||||
async def test_backup_start_timeout(
|
||||
recorder_mock, hass, hass_ws_client, hass_supervisor_access_token, recorder_db_url
|
||||
):
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
hass_supervisor_access_token: str,
|
||||
recorder_db_url: str,
|
||||
) -> None:
|
||||
"""Test getting backup start when recorder is not present."""
|
||||
if recorder_db_url.startswith(("mysql://", "postgresql://")):
|
||||
# This test is specific for SQLite: Locking is not implemented for other engines
|
||||
@ -2241,8 +2281,11 @@ async def test_backup_start_timeout(


async def test_backup_end(
recorder_mock, hass, hass_ws_client, hass_supervisor_access_token
):
recorder_mock: Recorder,
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
hass_supervisor_access_token: str,
) -> None:
"""Test backup start."""
client = await hass_ws_client(hass, hass_supervisor_access_token)

@ -2259,8 +2302,12 @@ async def test_backup_end(


async def test_backup_end_without_start(
recorder_mock, hass, hass_ws_client, hass_supervisor_access_token, recorder_db_url
):
recorder_mock: Recorder,
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
hass_supervisor_access_token: str,
recorder_db_url: str,
) -> None:
"""Test backup start."""
if recorder_db_url.startswith(("mysql://", "postgresql://")):
# This test is specific for SQLite: Locking is not implemented for other engines
@ -2297,8 +2344,14 @@ async def test_backup_end_without_start(
],
)
async def test_get_statistics_metadata(
recorder_mock, hass, hass_ws_client, units, attributes, unit, unit_class
):
recorder_mock: Recorder,
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
units,
attributes,
unit,
unit_class,
) -> None:
"""Test get_statistics_metadata."""
now = dt_util.utcnow()
has_mean = attributes["state_class"] == "measurement"
@ -2445,8 +2498,13 @@ async def test_get_statistics_metadata(
),
)
async def test_import_statistics(
recorder_mock, hass, hass_ws_client, caplog, source, statistic_id
):
recorder_mock: Recorder,
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
caplog: pytest.LogCaptureFixture,
source,
statistic_id,
) -> None:
"""Test importing statistics."""
client = await hass_ws_client()

@ -2672,8 +2730,13 @@ async def test_import_statistics(
),
)
async def test_adjust_sum_statistics_energy(
recorder_mock, hass, hass_ws_client, caplog, source, statistic_id
):
recorder_mock: Recorder,
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
caplog: pytest.LogCaptureFixture,
source,
statistic_id,
) -> None:
"""Test adjusting statistics."""
client = await hass_ws_client()

@ -2863,8 +2926,13 @@ async def test_adjust_sum_statistics_energy(
),
)
async def test_adjust_sum_statistics_gas(
recorder_mock, hass, hass_ws_client, caplog, source, statistic_id
):
recorder_mock: Recorder,
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
caplog: pytest.LogCaptureFixture,
source,
statistic_id,
) -> None:
"""Test adjusting statistics."""
client = await hass_ws_client()

@ -3065,17 +3133,17 @@ async def test_adjust_sum_statistics_gas(
),
)
async def test_adjust_sum_statistics_errors(
recorder_mock,
hass,
hass_ws_client,
caplog,
recorder_mock: Recorder,
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
caplog: pytest.LogCaptureFixture,
state_unit,
statistic_unit,
unit_class,
factor,
valid_units,
invalid_units,
):
) -> None:
"""Test incorrectly adjusting statistics."""
statistic_id = "sensor.total_energy_import"
source = "recorder"

@ -5,12 +5,12 @@ from unittest.mock import patch
from homeassistant.core import HomeAssistant
from homeassistant.helpers import recorder

from tests.common import SetupRecorderInstanceT
from tests.typing import RecorderInstanceGenerator


async def test_async_migration_in_progress(
async_setup_recorder_instance: SetupRecorderInstanceT, hass: HomeAssistant
):
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
) -> None:
"""Test async_migration_in_progress wraps the recorder."""
with patch(
"homeassistant.components.recorder.util.async_migration_in_progress",
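This final hunk swaps the old SetupRecorderInstanceT alias from tests.common for RecorderInstanceGenerator from tests.typing when annotating the async_setup_recorder_instance fixture. Below is a minimal sketch of a test using the renamed type; the test name and body are illustrative, not from this commit.

from homeassistant.components.recorder import Recorder
from homeassistant.core import HomeAssistant

from tests.typing import RecorderInstanceGenerator


async def test_recorder_instance_fixture_sketch(
    async_setup_recorder_instance: RecorderInstanceGenerator,
    hass: HomeAssistant,
) -> None:
    """Illustrative test that starts the recorder via the typed fixture."""
    instance: Recorder = await async_setup_recorder_instance(hass)
    assert instance is not None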