Mirror of https://github.com/home-assistant/core.git, synced 2025-07-13 08:17:08 +00:00
Add recorder test fixture to enable persistent SQLite database (#121137)
* Add recorder test fixture to enable persistent SQLite database
* Fix tests directly using async_test_home_assistant context manager
This commit is contained in:
parent 24f6e6e885
commit d55d02623a
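The change adds a `persistent_database` pytest fixture (defined in the conftest hunks at the bottom of this diff) that, when parametrized to `True` and running against the default `sqlite://` URL, makes `recorder_db_url` point at an on-disk file in a pytest temp directory instead of an in-memory database. A minimal sketch of a test opting in; only the fixture names come from this diff, the assertion is illustrative:

import pytest


@pytest.mark.parametrize("persistent_database", [True])
def test_db_url_is_on_disk(recorder_db_url: str) -> None:
    """With the default --dburl, the fixture now yields an on-disk SQLite URL."""
    # Three slashes ("sqlite:///") denote a file-backed database;
    # plain "sqlite://" is SQLAlchemy's in-memory form.
    assert recorder_db_url.startswith("sqlite:///")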
@@ -1,7 +1,6 @@
 """Test removing statistics duplicates."""
 
 import importlib
-from pathlib import Path
 import sys
 from unittest.mock import patch
 
@@ -15,7 +14,6 @@ from homeassistant.components.recorder.auto_repairs.statistics.duplicates import
     delete_statistics_duplicates,
     delete_statistics_meta_duplicates,
 )
-from homeassistant.components.recorder.const import SQLITE_URL_PREFIX
 from homeassistant.components.recorder.statistics import async_add_external_statistics
 from homeassistant.components.recorder.util import session_scope
 from homeassistant.core import HomeAssistant
@@ -133,17 +131,13 @@ def _create_engine_28(*args, **kwargs):
     return engine
 
 
+@pytest.mark.parametrize("persistent_database", [True])
+@pytest.mark.usefixtures("hass_storage")  # Prevent test hass from writing to storage
 async def test_delete_metadata_duplicates(
     async_test_recorder: RecorderInstanceGenerator,
     caplog: pytest.LogCaptureFixture,
-    tmp_path: Path,
 ) -> None:
     """Test removal of duplicated statistics."""
-    test_dir = tmp_path.joinpath("sqlite")
-    test_dir.mkdir()
-    test_db_file = test_dir.joinpath("test_run_info.db")
-    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
-
     module = "tests.components.recorder.db_schema_28"
     importlib.import_module(module)
     old_db_schema = sys.modules[module]
@@ -202,7 +196,7 @@ async def test_delete_metadata_duplicates(
     ):
         async with (
             async_test_home_assistant() as hass,
-            async_test_recorder(hass, {"db_url": dburl}),
+            async_test_recorder(hass),
         ):
             await async_wait_recording_done(hass)
             await async_wait_recording_done(hass)
@@ -224,7 +218,7 @@ async def test_delete_metadata_duplicates(
     # Test that the duplicates are removed during migration from schema 28
     async with (
         async_test_home_assistant() as hass,
-        async_test_recorder(hass, {"db_url": dburl}),
+        async_test_recorder(hass),
     ):
         await hass.async_start()
         await async_wait_recording_done(hass)
@@ -242,17 +236,13 @@ async def test_delete_metadata_duplicates(
     await hass.async_stop()
 
 
+@pytest.mark.parametrize("persistent_database", [True])
+@pytest.mark.usefixtures("hass_storage")  # Prevent test hass from writing to storage
 async def test_delete_metadata_duplicates_many(
     async_test_recorder: RecorderInstanceGenerator,
     caplog: pytest.LogCaptureFixture,
-    tmp_path: Path,
 ) -> None:
     """Test removal of duplicated statistics."""
-    test_dir = tmp_path.joinpath("sqlite")
-    test_dir.mkdir()
-    test_db_file = test_dir.joinpath("test_run_info.db")
-    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
-
     module = "tests.components.recorder.db_schema_28"
     importlib.import_module(module)
     old_db_schema = sys.modules[module]
@@ -323,7 +313,7 @@ async def test_delete_metadata_duplicates_many(
     ):
         async with (
             async_test_home_assistant() as hass,
-            async_test_recorder(hass, {"db_url": dburl}),
+            async_test_recorder(hass),
         ):
             await async_wait_recording_done(hass)
             await async_wait_recording_done(hass)
@@ -336,7 +326,7 @@ async def test_delete_metadata_duplicates_many(
     # Test that the duplicates are removed during migration from schema 28
     async with (
         async_test_home_assistant() as hass,
-        async_test_recorder(hass, {"db_url": dburl}),
+        async_test_recorder(hass),
     ):
         await hass.async_start()
         await async_wait_recording_done(hass)
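The hunks above drop the hand-built SQLite URL from the statistics-duplicates repair tests: with `persistent_database=True`, `async_test_recorder(hass)` reuses the same on-disk database for every recorder instance in the test. A hedged sketch of that restart pattern (the harness imports are assumed to match the ones used in the file):

import pytest

from tests.common import async_test_home_assistant  # assumed harness import
from tests.components.recorder.common import async_wait_recording_done  # assumed


@pytest.mark.parametrize("persistent_database", [True])
async def test_restart_shares_database(async_test_recorder) -> None:
    """Two sequential recorder instances see the same on-disk SQLite file."""
    # First instance writes rows, then shuts down cleanly.
    async with async_test_home_assistant() as hass, async_test_recorder(hass):
        await async_wait_recording_done(hass)
        await hass.async_stop()
    # Second instance starts against the same file, so the rows written
    # above are still present to be migrated or verified.
    async with async_test_home_assistant() as hass, async_test_recorder(hass):
        await async_wait_recording_done(hass)
        await hass.async_stop()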
@@ -5,7 +5,6 @@ from __future__ import annotations
 import asyncio
 from collections.abc import Generator
 from datetime import datetime, timedelta
-from pathlib import Path
 import sqlite3
 import threading
 from typing import Any, cast
@@ -26,7 +25,6 @@ from homeassistant.components.recorder import (
     CONF_DB_URL,
     CONFIG_SCHEMA,
     DOMAIN,
-    SQLITE_URL_PREFIX,
     Recorder,
     db_schema,
     get_instance,
@@ -140,19 +138,16 @@ def _default_recorder(hass):
     )
 
 
+@pytest.mark.parametrize("persistent_database", [True])
 async def test_shutdown_before_startup_finishes(
     hass: HomeAssistant,
     async_setup_recorder_instance: RecorderInstanceGenerator,
     recorder_db_url: str,
-    tmp_path: Path,
 ) -> None:
-    """Test shutdown before recorder starts is clean."""
-    if recorder_db_url == "sqlite://":
-        # On-disk database because this test does not play nice with the
-        # MutexPool
-        recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db")
+    """Test shutdown before recorder starts is clean.
+
+    On-disk database because this test does not play nice with the MutexPool.
+    """
     config = {
         recorder.CONF_DB_URL: recorder_db_url,
         recorder.CONF_COMMIT_INTERVAL: 1,
     }
     hass.set_state(CoreState.not_running)
@@ -1371,15 +1366,13 @@ async def test_statistics_runs_initiated(
 
 
 @pytest.mark.freeze_time("2022-09-13 09:00:00+02:00")
+@pytest.mark.parametrize("persistent_database", [True])
+@pytest.mark.usefixtures("hass_storage")  # Prevent test hass from writing to storage
 async def test_compile_missing_statistics(
-    tmp_path: Path, freezer: FrozenDateTimeFactory
+    recorder_db_url: str, freezer: FrozenDateTimeFactory
 ) -> None:
     """Test missing statistics are compiled on startup."""
     now = dt_util.utcnow().replace(minute=0, second=0, microsecond=0)
-    test_dir = tmp_path.joinpath("sqlite")
-    test_dir.mkdir()
-    test_db_file = test_dir.joinpath("test_run_info.db")
-    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
 
     def get_statistic_runs(hass: HomeAssistant) -> list:
         with session_scope(hass=hass, read_only=True) as session:
@@ -1387,7 +1380,9 @@ async def test_compile_missing_statistics(
 
     async with async_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
-        await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}})
+        await async_setup_component(
+            hass, DOMAIN, {DOMAIN: {CONF_DB_URL: recorder_db_url}}
+        )
         await hass.async_start()
         await async_wait_recording_done(hass)
         await async_wait_recording_done(hass)
@@ -1428,7 +1423,9 @@ async def test_compile_missing_statistics(
     )
 
     recorder_helper.async_initialize_recorder(hass)
-    await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}})
+    await async_setup_component(
+        hass, DOMAIN, {DOMAIN: {CONF_DB_URL: recorder_db_url}}
+    )
     await hass.async_start()
     await async_wait_recording_done(hass)
     await async_wait_recording_done(hass)
@@ -1633,12 +1630,10 @@ async def test_service_disable_states_not_recording(
     )
 
 
-async def test_service_disable_run_information_recorded(tmp_path: Path) -> None:
+@pytest.mark.parametrize("persistent_database", [True])
+@pytest.mark.usefixtures("hass_storage")  # Prevent test hass from writing to storage
+async def test_service_disable_run_information_recorded(recorder_db_url: str) -> None:
     """Test that runs are still recorded when recorder is disabled."""
-    test_dir = tmp_path.joinpath("sqlite")
-    test_dir.mkdir()
-    test_db_file = test_dir.joinpath("test_run_info.db")
-    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
 
     def get_recorder_runs(hass: HomeAssistant) -> list:
         with session_scope(hass=hass, read_only=True) as session:
@@ -1646,7 +1641,9 @@ async def test_service_disable_run_information_recorded(tmp_path: Path) -> None:
 
     async with async_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
-        await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}})
+        await async_setup_component(
+            hass, DOMAIN, {DOMAIN: {CONF_DB_URL: recorder_db_url}}
+        )
         await hass.async_start()
         await async_wait_recording_done(hass)
 
@@ -1668,7 +1665,9 @@ async def test_service_disable_run_information_recorded(tmp_path: Path) -> None:
 
    async with async_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
-        await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}})
+        await async_setup_component(
+            hass, DOMAIN, {DOMAIN: {CONF_DB_URL: recorder_db_url}}
+        )
         await hass.async_start()
         await async_wait_recording_done(hass)
 
@@ -1687,22 +1686,16 @@ class CannotSerializeMe:
     """A class that the JSONEncoder cannot serialize."""
 
 
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
+@pytest.mark.parametrize("persistent_database", [True])
 async def test_database_corruption_while_running(
-    hass: HomeAssistant, tmp_path: Path, caplog: pytest.LogCaptureFixture
+    hass: HomeAssistant, recorder_db_url: str, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test we can recover from sqlite3 db corruption."""
 
-    def _create_tmpdir_for_test_db() -> Path:
-        test_dir = tmp_path.joinpath("sqlite")
-        test_dir.mkdir()
-        return test_dir.joinpath("test.db")
-
-    test_db_file = await hass.async_add_executor_job(_create_tmpdir_for_test_db)
-    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
-
     recorder_helper.async_initialize_recorder(hass)
     assert await async_setup_component(
-        hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl, CONF_COMMIT_INTERVAL: 0}}
+        hass, DOMAIN, {DOMAIN: {CONF_DB_URL: recorder_db_url, CONF_COMMIT_INTERVAL: 0}}
     )
     await hass.async_block_till_done()
     caplog.clear()
@@ -1722,6 +1715,7 @@ async def test_database_corruption_while_running(
         side_effect=OperationalError("statement", {}, []),
     ):
         await async_wait_recording_done(hass)
+        test_db_file = recorder_db_url.removeprefix("sqlite:///")
         await hass.async_add_executor_job(corrupt_db_file, test_db_file)
         await async_wait_recording_done(hass)
 
@@ -1817,23 +1811,19 @@ async def test_entity_id_filter(
 
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
+@pytest.mark.parametrize("persistent_database", [True])
 async def test_database_lock_and_unlock(
     hass: HomeAssistant,
     async_setup_recorder_instance: RecorderInstanceGenerator,
     recorder_db_url: str,
-    tmp_path: Path,
 ) -> None:
     """Test writing events during lock getting written after unlocking.
 
     This test is specific for SQLite: Locking is not implemented for other engines.
-    """
-
-    if recorder_db_url == "sqlite://":
-        # Use file DB, in memory DB cannot do write locks.
-        recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db")
+
+    Use file DB, in memory DB cannot do write locks.
+    """
     config = {
         recorder.CONF_COMMIT_INTERVAL: 0,
         recorder.CONF_DB_URL: recorder_db_url,
     }
     await async_setup_recorder_instance(hass, config)
     await hass.async_block_till_done()
@@ -1873,26 +1863,21 @@ async def test_database_lock_and_unlock(
 
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
+@pytest.mark.parametrize("persistent_database", [True])
 async def test_database_lock_and_overflow(
     hass: HomeAssistant,
     async_setup_recorder_instance: RecorderInstanceGenerator,
     recorder_db_url: str,
-    tmp_path: Path,
     caplog: pytest.LogCaptureFixture,
     issue_registry: ir.IssueRegistry,
 ) -> None:
     """Test writing events during lock leading to overflow the queue causes the database to unlock.
 
     This test is specific for SQLite: Locking is not implemented for other engines.
-    """
-
-    # Use file DB, in memory DB cannot do write locks.
-    if recorder_db_url == "sqlite://":
-        # Use file DB, in memory DB cannot do write locks.
-        recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db")
+
+    Use file DB, in memory DB cannot do write locks.
+    """
     config = {
         recorder.CONF_COMMIT_INTERVAL: 0,
         recorder.CONF_DB_URL: recorder_db_url,
     }
 
     def _get_db_events():
@@ -1941,26 +1926,21 @@ async def test_database_lock_and_overflow(
 
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
+@pytest.mark.parametrize("persistent_database", [True])
 async def test_database_lock_and_overflow_checks_available_memory(
     hass: HomeAssistant,
     async_setup_recorder_instance: RecorderInstanceGenerator,
     recorder_db_url: str,
-    tmp_path: Path,
     caplog: pytest.LogCaptureFixture,
     issue_registry: ir.IssueRegistry,
 ) -> None:
     """Test writing events during lock leading to overflow the queue causes the database to unlock.
 
     This test is specific for SQLite: Locking is not implemented for other engines.
-    """
-
-    # Use file DB, in memory DB cannot do write locks.
-    if recorder_db_url == "sqlite://":
-        # Use file DB, in memory DB cannot do write locks.
-        recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db")
+
+    Use file DB, in memory DB cannot do write locks.
+    """
     config = {
         recorder.CONF_COMMIT_INTERVAL: 0,
         recorder.CONF_DB_URL: recorder_db_url,
     }
 
     def _get_db_events():
@@ -2659,7 +2639,6 @@ async def test_commit_before_commits_pending_writes(
     hass: HomeAssistant,
     async_setup_recorder_instance: RecorderInstanceGenerator,
     recorder_db_url: str,
-    tmp_path: Path,
 ) -> None:
     """Test commit_before with a non-zero commit interval.
 
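One detail worth calling out from the corruption test above: the on-disk path is now derived from the fixture URL rather than from a private temp dir. The prefix arithmetic, with an illustrative value:

# Illustrative value; the real URL comes from the recorder_db_url fixture.
recorder_db_url = "sqlite:////tmp/pytest-recorder0/pytest.db"

# Stripping the three-slash scheme prefix leaves the absolute filesystem
# path that corrupt_db_file() needs.
test_db_file = recorder_db_url.removeprefix("sqlite:///")
assert test_db_file == "/tmp/pytest-recorder0/pytest.db"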
@@ -1,7 +1,6 @@
 """Test run time migrations are remembered in the migration_changes table."""
 
 import importlib
-from pathlib import Path
 import sys
 from unittest.mock import patch
 
@@ -62,13 +61,11 @@ def _create_engine_test(*args, **kwargs):
     return engine
 
 
-@pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
-@pytest.mark.usefixtures("skip_by_db_engine")
 @pytest.mark.parametrize("enable_migrate_context_ids", [True])
+@pytest.mark.parametrize("persistent_database", [True])
+@pytest.mark.usefixtures("hass_storage")  # Prevent test hass from writing to storage
 async def test_migration_changes_prevent_trying_to_migrate_again(
     async_setup_recorder_instance: RecorderInstanceGenerator,
-    tmp_path: Path,
-    recorder_db_url: str,
 ) -> None:
     """Test that we do not try to migrate when migration_changes indicate it's already migrated.
 
@@ -77,12 +74,9 @@ async def test_migration_changes_prevent_trying_to_migrate_again(
     1. With schema 32 to populate the data
     2. With current schema so the migration happens
     3. With current schema to verify we do not have to query to see if the migration is done
 
     This test uses a test database between runs so it's SQLite specific.
     """
 
-    config = {
-        recorder.CONF_DB_URL: "sqlite:///" + str(tmp_path / "pytest.db"),
-        recorder.CONF_COMMIT_INTERVAL: 1,
-    }
+    config = {recorder.CONF_COMMIT_INTERVAL: 1}
     importlib.import_module(SCHEMA_MODULE)
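The docstring above describes starting Home Assistant three times against one database so the migration_changes table can short-circuit repeat migrations. A rough sketch of that shape, with helper names assumed from the surrounding harness:

import pytest

from tests.common import async_test_home_assistant  # assumed harness import


@pytest.mark.parametrize("persistent_database", [True])
async def test_three_starts_one_database(async_test_recorder) -> None:
    """Start and stop three times; the persistent DB carries migration state."""
    for _ in range(3):
        # Each instance reuses the same on-disk SQLite file, so the second
        # run migrates and the third finds the migration already recorded.
        async with async_test_home_assistant() as hass, async_test_recorder(hass):
            await hass.async_start()
            await hass.async_stop()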
@@ -15,7 +15,7 @@ from unittest.mock import patch
 import pytest
 
 from homeassistant.components import recorder
-from homeassistant.components.recorder import SQLITE_URL_PREFIX, get_instance
+from homeassistant.components.recorder import get_instance
 from homeassistant.components.recorder.util import session_scope
 from homeassistant.helpers import recorder as recorder_helper
 from homeassistant.setup import setup_component
@@ -34,13 +34,16 @@ SCHEMA_VERSION_POSTFIX = "23_with_newer_columns"
 SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX)
 
 
-def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None:
-    """Test removal of duplicated statistics."""
-    test_dir = tmp_path.joinpath("sqlite")
-    test_dir.mkdir()
-    test_db_file = test_dir.joinpath("test_run_info.db")
-    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
+@pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
+@pytest.mark.usefixtures("skip_by_db_engine")
+@pytest.mark.parametrize("persistent_database", [True])
+def test_delete_duplicates(
+    recorder_db_url: str, caplog: pytest.LogCaptureFixture
+) -> None:
+    """Test removal of duplicated statistics.
+
+    The test only works with SQLite.
+    """
     importlib.import_module(SCHEMA_MODULE)
     old_db_schema = sys.modules[SCHEMA_MODULE]
 
@@ -176,7 +179,7 @@ def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) ->
         get_test_home_assistant() as hass,
     ):
         recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
+        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
         get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
         wait_recording_done(hass)
         wait_recording_done(hass)
@@ -204,7 +207,7 @@ def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) ->
     # Test that the duplicates are removed during migration from schema 23
     with get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
+        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
         hass.start()
         wait_recording_done(hass)
         wait_recording_done(hass)
@@ -215,15 +218,16 @@ def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) ->
     assert "Found duplicated" not in caplog.text
 
 
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
+@pytest.mark.parametrize("persistent_database", [True])
 def test_delete_duplicates_many(
-    caplog: pytest.LogCaptureFixture, tmp_path: Path
+    recorder_db_url: str, caplog: pytest.LogCaptureFixture
 ) -> None:
-    """Test removal of duplicated statistics."""
-    test_dir = tmp_path.joinpath("sqlite")
-    test_dir.mkdir()
-    test_db_file = test_dir.joinpath("test_run_info.db")
-    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
+    """Test removal of duplicated statistics.
+
+    The test only works with SQLite.
+    """
     importlib.import_module(SCHEMA_MODULE)
     old_db_schema = sys.modules[SCHEMA_MODULE]
 
@@ -359,7 +363,7 @@ def test_delete_duplicates_many(
         get_test_home_assistant() as hass,
     ):
         recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
+        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
         get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
         wait_recording_done(hass)
         wait_recording_done(hass)
@@ -393,7 +397,7 @@ def test_delete_duplicates_many(
     # Test that the duplicates are removed during migration from schema 23
     with get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
+        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
         hass.start()
         wait_recording_done(hass)
         wait_recording_done(hass)
@@ -405,15 +409,16 @@ def test_delete_duplicates_many(
 
 
 @pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
+@pytest.mark.parametrize("persistent_database", [True])
 def test_delete_duplicates_non_identical(
-    caplog: pytest.LogCaptureFixture, tmp_path: Path
+    recorder_db_url: str, caplog: pytest.LogCaptureFixture, tmp_path: Path
 ) -> None:
-    """Test removal of duplicated statistics."""
-    test_dir = tmp_path.joinpath("sqlite")
-    test_dir.mkdir()
-    test_db_file = test_dir.joinpath("test_run_info.db")
-    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
+    """Test removal of duplicated statistics.
+
+    The test only works with SQLite.
+    """
     importlib.import_module(SCHEMA_MODULE)
     old_db_schema = sys.modules[SCHEMA_MODULE]
 
@@ -519,7 +524,7 @@ def test_delete_duplicates_non_identical(
         get_test_home_assistant() as hass,
     ):
         recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
+        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
         get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
         wait_recording_done(hass)
         wait_recording_done(hass)
@@ -543,7 +548,7 @@ def test_delete_duplicates_non_identical(
     with get_test_home_assistant() as hass:
         hass.config.config_dir = tmp_path
         recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
+        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
         hass.start()
         wait_recording_done(hass)
         wait_recording_done(hass)
@@ -589,15 +594,16 @@ def test_delete_duplicates_non_identical(
     ]
 
 
+@pytest.mark.parametrize("persistent_database", [True])
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
 def test_delete_duplicates_short_term(
-    caplog: pytest.LogCaptureFixture, tmp_path: Path
+    recorder_db_url: str, caplog: pytest.LogCaptureFixture, tmp_path: Path
 ) -> None:
-    """Test removal of duplicated statistics."""
-    test_dir = tmp_path.joinpath("sqlite")
-    test_dir.mkdir()
-    test_db_file = test_dir.joinpath("test_run_info.db")
-    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
+    """Test removal of duplicated statistics.
+
+    The test only works with SQLite.
+    """
    importlib.import_module(SCHEMA_MODULE)
     old_db_schema = sys.modules[SCHEMA_MODULE]
 
@@ -634,7 +640,7 @@ def test_delete_duplicates_short_term(
         get_test_home_assistant() as hass,
     ):
         recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
+        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
         get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
         wait_recording_done(hass)
         wait_recording_done(hass)
@@ -657,7 +663,7 @@ def test_delete_duplicates_short_term(
     with get_test_home_assistant() as hass:
         hass.config.config_dir = tmp_path
         recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
+        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
         hass.start()
         wait_recording_done(hass)
         wait_recording_done(hass)
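These duplicate-removal tests run on the synchronous harness. A condensed sketch of the two-phase pattern they all share (imports assumed to match the names used above):

import threading

import pytest

from homeassistant.components.recorder import get_instance
from homeassistant.helpers import recorder as recorder_helper
from homeassistant.setup import setup_component
from tests.common import get_test_home_assistant  # assumed harness import
from tests.components.recorder.common import wait_recording_done  # assumed


@pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
@pytest.mark.usefixtures("skip_by_db_engine")
@pytest.mark.parametrize("persistent_database", [True])
def test_two_phase_sketch(recorder_db_url: str) -> None:
    """Phase one writes duplicates under the old schema; phase two migrates."""
    with get_test_home_assistant() as hass:
        recorder_helper.async_initialize_recorder(hass)
        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
        get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
        wait_recording_done(hass)
        hass.stop()
    # Restart on the same persistent database; migration de-duplicates rows.
    with get_test_home_assistant() as hass:
        recorder_helper.async_initialize_recorder(hass)
        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
        hass.start()
        wait_recording_done(hass)
        hass.stop()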
@@ -116,12 +116,18 @@ def test_validate_or_move_away_sqlite_database(
     assert util.validate_or_move_away_sqlite_database(dburl) is True
 
 
+@pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
+@pytest.mark.usefixtures("skip_by_db_engine")
+@pytest.mark.parametrize("persistent_database", [True])
+@pytest.mark.usefixtures("hass_storage")  # Prevent test hass from writing to storage
 async def test_last_run_was_recently_clean(
-    async_setup_recorder_instance: RecorderInstanceGenerator, tmp_path: Path
+    async_setup_recorder_instance: RecorderInstanceGenerator,
 ) -> None:
-    """Test we can check if the last recorder run was recently clean."""
+    """Test we can check if the last recorder run was recently clean.
+
+    This is only implemented for SQLite.
+    """
     config = {
-        recorder.CONF_DB_URL: "sqlite:///" + str(tmp_path / "pytest.db"),
         recorder.CONF_COMMIT_INTERVAL: 1,
     }
     async with async_test_home_assistant() as hass:
@@ -850,17 +856,22 @@ async def test_periodic_db_cleanups(
     assert str(text_obj) == "PRAGMA wal_checkpoint(TRUNCATE);"
 
 
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
+@pytest.mark.parametrize("persistent_database", [True])
 async def test_write_lock_db(
     async_setup_recorder_instance: RecorderInstanceGenerator,
     hass: HomeAssistant,
-    tmp_path: Path,
     recorder_db_url: str,
 ) -> None:
-    """Test database write lock."""
+    """Test database write lock.
+
+    This is only supported for SQLite.
+
+    Use file DB, in memory DB cannot do write locks.
+    """
 
-    # Use file DB, in memory DB cannot do write locks.
-    config = {
-        recorder.CONF_DB_URL: "sqlite:///" + str(tmp_path / "pytest.db?timeout=0.1")
-    }
+    config = {recorder.CONF_DB_URL: recorder_db_url + "?timeout=0.1"}
     instance = await async_setup_recorder_instance(hass, config)
     await hass.async_block_till_done()
 
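The write-lock test now appends a connect argument to the fixture URL instead of assembling its own path; a short illustration with an example value:

# Example value; with persistent_database=True the fixture yields a file URL.
recorder_db_url = "sqlite:////tmp/pytest-recorder0/pytest.db"

# The query string keeps the file path but adds a sqlite driver option;
# timeout=0.1 makes lock contention fail fast instead of blocking.
db_url = recorder_db_url + "?timeout=0.1"
assert db_url == "sqlite:////tmp/pytest-recorder0/pytest.db?timeout=0.1"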
@@ -2,7 +2,6 @@
 
 from datetime import timedelta
 import importlib
-from pathlib import Path
 import sys
 from unittest.mock import patch
 
@@ -11,7 +10,7 @@ from sqlalchemy import create_engine, inspect
 from sqlalchemy.orm import Session
 
 from homeassistant.components import recorder
-from homeassistant.components.recorder import SQLITE_URL_PREFIX, core, statistics
+from homeassistant.components.recorder import core, statistics
 from homeassistant.components.recorder.queries import select_event_type_ids
 from homeassistant.components.recorder.util import session_scope
 from homeassistant.core import EVENT_STATE_CHANGED, Event, EventOrigin, State
@@ -49,13 +48,13 @@ def _create_engine_test(*args, **kwargs):
     return engine
 
 
-async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None:
+@pytest.mark.parametrize("persistent_database", [True])
+@pytest.mark.usefixtures("hass_storage")  # Prevent test hass from writing to storage
+async def test_migrate_times(
+    caplog: pytest.LogCaptureFixture,
+    recorder_db_url: str,
+) -> None:
     """Test we can migrate times."""
-    test_dir = tmp_path.joinpath("sqlite")
-    test_dir.mkdir()
-    test_db_file = test_dir.joinpath("test_run_info.db")
-    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
-
     importlib.import_module(SCHEMA_MODULE)
     old_db_schema = sys.modules[SCHEMA_MODULE]
     now = dt_util.utcnow()
@@ -123,7 +122,7 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) -
     async with async_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         assert await async_setup_component(
-            hass, "recorder", {"recorder": {"db_url": dburl}}
+            hass, "recorder", {"recorder": {"db_url": recorder_db_url}}
         )
         await hass.async_block_till_done()
         await async_wait_recording_done(hass)
@@ -153,7 +152,7 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) -
     async with async_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         assert await async_setup_component(
-            hass, "recorder", {"recorder": {"db_url": dburl}}
+            hass, "recorder", {"recorder": {"db_url": recorder_db_url}}
         )
         await hass.async_block_till_done()
 
@@ -220,15 +219,13 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) -
     await hass.async_stop()
 
 
+@pytest.mark.parametrize("persistent_database", [True])
+@pytest.mark.usefixtures("hass_storage")  # Prevent test hass from writing to storage
 async def test_migrate_can_resume_entity_id_post_migration(
-    caplog: pytest.LogCaptureFixture, tmp_path: Path
+    caplog: pytest.LogCaptureFixture,
+    recorder_db_url: str,
 ) -> None:
     """Test we resume the entity id post migration after a restart."""
-    test_dir = tmp_path.joinpath("sqlite")
-    test_dir.mkdir()
-    test_db_file = test_dir.joinpath("test_run_info.db")
-    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
-
     importlib.import_module(SCHEMA_MODULE)
     old_db_schema = sys.modules[SCHEMA_MODULE]
     now = dt_util.utcnow()
@@ -293,7 +290,7 @@ async def test_migrate_can_resume_entity_id_post_migration(
     async with async_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         assert await async_setup_component(
-            hass, "recorder", {"recorder": {"db_url": dburl}}
+            hass, "recorder", {"recorder": {"db_url": recorder_db_url}}
         )
         await hass.async_block_till_done()
         await async_wait_recording_done(hass)
@@ -323,7 +320,7 @@ async def test_migrate_can_resume_entity_id_post_migration(
     async with async_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         assert await async_setup_component(
-            hass, "recorder", {"recorder": {"db_url": dburl}}
+            hass, "recorder", {"recorder": {"db_url": recorder_db_url}}
         )
         await hass.async_block_till_done()
 
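The migration tests above follow one recurring recipe: run once with `create_engine` patched to the old-schema factory, then restart unpatched on the same persistent database and watch the migration (or its resumption). An outline under that assumption; `run_home_assistant_once` is a hypothetical stand-in for one start/stop cycle:

from unittest.mock import patch

from homeassistant.components.recorder import core


def migration_outline(_create_engine_test, run_home_assistant_once) -> None:
    """Outline only: populate under the old schema, then migrate for real."""
    # Phase one: force the old schema's engine so legacy rows are written.
    with patch.object(core, "create_engine", new=_create_engine_test):
        run_home_assistant_once()  # hypothetical helper: one start/stop
    # Phase two: no patch, so startup sees the old on-disk schema and migrates.
    run_home_assistant_once()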
@@ -12,6 +12,7 @@ import itertools
 import logging
 import os
 import reprlib
+from shutil import rmtree
 import sqlite3
 import ssl
 import threading
@@ -1309,16 +1310,36 @@ def recorder_config() -> dict[str, Any] | None:
     return None
 
 
+@pytest.fixture
+def persistent_database() -> bool:
+    """Fixture to control if database should persist when recorder is shut down in test.
+
+    When using sqlite, this uses on disk database instead of in memory database.
+    This does nothing when using mysql or postgresql.
+
+    Note that the database is always destroyed in between tests.
+
+    To use a persistent database, tests can be marked with:
+    @pytest.mark.parametrize("persistent_database", [True])
+    """
+    return False
+
+
 @pytest.fixture
 def recorder_db_url(
     pytestconfig: pytest.Config,
     hass_fixture_setup: list[bool],
+    persistent_database: bool,
+    tmp_path_factory: pytest.TempPathFactory,
 ) -> Generator[str]:
     """Prepare a default database for tests and return a connection URL."""
     assert not hass_fixture_setup
 
     db_url = cast(str, pytestconfig.getoption("dburl"))
-    if db_url.startswith("mysql://"):
+    if db_url == "sqlite://" and persistent_database:
+        tmp_path = tmp_path_factory.mktemp("recorder")
+        db_url = "sqlite:///" + str(tmp_path / "pytest.db")
+    elif db_url.startswith("mysql://"):
         # pylint: disable-next=import-outside-toplevel
         import sqlalchemy_utils
 
@@ -1332,7 +1353,9 @@ def recorder_db_url(
     assert not sqlalchemy_utils.database_exists(db_url)
     sqlalchemy_utils.create_database(db_url, encoding="utf8")
     yield db_url
-    if db_url.startswith("mysql://"):
+    if db_url == "sqlite://" and persistent_database:
+        rmtree(tmp_path, ignore_errors=True)
+    elif db_url.startswith("mysql://"):
         # pylint: disable-next=import-outside-toplevel
         import sqlalchemy as sa
 
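Restating the sqlite branch of the updated fixture as a standalone function, for illustration (logic mirrored from the hunk above; the function name is not from the diff):

def pick_db_url(db_url: str, persistent_database: bool, tmp_path: str) -> str:
    """Mirror of the sqlite branch: stay in memory unless persistence is requested."""
    if db_url == "sqlite://" and persistent_database:
        return "sqlite:///" + tmp_path + "/pytest.db"
    return db_url


assert pick_db_url("sqlite://", True, "/tmp/recorder0") == "sqlite:////tmp/recorder0/pytest.db"
assert pick_db_url("sqlite://", False, "/tmp/recorder0") == "sqlite://"

MySQL and PostgreSQL URLs are untouched by the flag; for those backends the fixture keeps creating a scratch database before the test and dropping it afterwards.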