Avoid locking the database for non-SQLite backends (#63847)

* Avoid locking the database for non-SQLite backends

Currently we only have a lock implementation for SQLite. For all other
database backends, simply return success: they are not expected to store
data in the config directory, so the caller can assume that a backup can
be taken safely.

This fixes `RuntimeError: generator didn't yield` errors when creating
a backup with the current Supervisor dev builds.
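
For context, the error comes from `contextlib.contextmanager`: if the decorated
generator returns without ever yielding (as the old `write_lock_db` did for
non-SQLite dialects), entering the context raises `RuntimeError: generator
didn't yield`. A minimal sketch of the failure mode, using an illustrative
`dialect` argument rather than the recorder's actual signature:

from contextlib import contextmanager

@contextmanager
def write_lock(dialect: str):
    """Yield only for SQLite; any other dialect falls through without yielding."""
    if dialect == "sqlite":
        # ... checkpoint and take the write lock ...
        yield
        # ... release the write lock ...

# Entering the context with a non-SQLite dialect never reaches a yield,
# so contextlib raises: RuntimeError: generator didn't yield
with write_lock("mysql"):
    pass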
Stefan Agner 2022-01-11 16:17:56 +01:00 committed by GitHub
parent f2a6118435
commit 0a9927d18e
4 changed files with 52 additions and 18 deletions

View File

@@ -78,7 +78,7 @@ from .util import (
     session_scope,
     setup_connection_for_dialect,
     validate_or_move_away_sqlite_database,
-    write_lock_db,
+    write_lock_db_sqlite,
 )

 _LOGGER = logging.getLogger(__name__)
@@ -870,7 +870,7 @@ class Recorder(threading.Thread):
         def _async_set_database_locked(task: DatabaseLockTask):
             task.database_locked.set()

-        with write_lock_db(self):
+        with write_lock_db_sqlite(self):
             # Notify that lock is being held, wait until database can be used again.
             self.hass.add_job(_async_set_database_locked, task)
             while not task.database_unlock.wait(timeout=DB_LOCK_QUEUE_CHECK_TIMEOUT):
@@ -1057,6 +1057,12 @@ class Recorder(threading.Thread):

     async def lock_database(self) -> bool:
         """Lock database so it can be backed up safely."""
+        if not self.engine or self.engine.dialect.name != "sqlite":
+            _LOGGER.debug(
+                "Not a SQLite database or not connected, locking not necessary"
+            )
+            return True
+
         if self._database_lock_task:
             _LOGGER.warning("Database already locked")
             return False
@@ -1080,6 +1086,12 @@ class Recorder(threading.Thread):

         Returns true if database lock has been held throughout the process.
         """
+        if not self.engine or self.engine.dialect.name != "sqlite":
+            _LOGGER.debug(
+                "Not a SQLite database or not connected, unlocking not necessary"
+            )
+            return True
+
         if not self._database_lock_task:
             _LOGGER.warning("Database currently not locked")
             return False

View File

@@ -462,10 +462,9 @@ def perodic_db_cleanups(instance: Recorder):

 @contextmanager
-def write_lock_db(instance: Recorder):
+def write_lock_db_sqlite(instance: Recorder):
     """Lock database for writes."""
-    if instance.engine.dialect.name == "sqlite":
-        with instance.engine.connect() as connection:
-            # Execute sqlite to create a wal checkpoint
-            # This is optional but makes sure the backup is going to be minimal
+    with instance.engine.connect() as connection:
+        # Execute sqlite to create a wal checkpoint
+        # This is optional but makes sure the backup is going to be minimal
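
For orientation, a minimal sketch of what a write-lock context manager of this
shape can look like once the dialect check moves to the caller; the exact
statements (the WAL checkpoint and a `BEGIN IMMEDIATE`/`END` pair) are
assumptions, not necessarily the recorder's real SQL:

from contextlib import contextmanager

from sqlalchemy import text

@contextmanager
def write_lock_db_sqlite(instance):
    """Lock an SQLite database for writes; callers check the dialect first."""
    with instance.engine.connect() as connection:
        # Flush the WAL into the main database file so the backup stays small.
        connection.execute(text("PRAGMA wal_checkpoint(TRUNCATE);"))
        # An immediate transaction holds the write lock until we end it.
        connection.execute(text("BEGIN IMMEDIATE;"))
        try:
            yield
        finally:
            connection.execute(text("END;"))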

View File

@@ -3,6 +3,7 @@
 import asyncio
 from datetime import datetime, timedelta
 import sqlite3
+import threading
 from unittest.mock import patch

 import pytest
@@ -1204,12 +1205,34 @@ async def test_database_lock_timeout(hass):
     """Test locking database timeout when recorder stopped."""
     await async_init_recorder_component(hass)

     hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
-    await hass.async_block_till_done()
     instance: Recorder = hass.data[DATA_INSTANCE]
+
+    class BlockQueue(recorder.RecorderTask):
+        event: threading.Event = threading.Event()
+
+        def run(self, instance: Recorder) -> None:
+            self.event.wait()
+
+    block_task = BlockQueue()
+    instance.queue.put(block_task)
     with patch.object(recorder, "DB_LOCK_TIMEOUT", 0.1):
         try:
             with pytest.raises(TimeoutError):
                 await instance.lock_database()
         finally:
             instance.unlock_database()
+            block_task.event.set()
+
+
+async def test_database_lock_without_instance(hass):
+    """Test database lock doesn't fail if instance is not initialized."""
+    await async_init_recorder_component(hass)
+
+    hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
+    instance: Recorder = hass.data[DATA_INSTANCE]
+    with patch.object(instance, "engine", None):
+        try:
+            assert await instance.lock_database()
+        finally:
+            assert instance.unlock_database()
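
The `BlockQueue` task forces the timeout path by parking the recorder's queue
consumer on an event, so the lock request queued behind it is never processed.
A standalone sketch of the same pattern, with every name here being illustrative:

import queue
import threading

task_queue: queue.SimpleQueue = queue.SimpleQueue()

def worker() -> None:
    """Process queued tasks in order; a task is just a callable."""
    while True:
        task_queue.get()()

threading.Thread(target=worker, daemon=True).start()

# A blocking task parks the worker until the test releases it.
release = threading.Event()
task_queue.put(release.wait)

# Anything queued behind the blocker cannot run, which is the situation
# lock_database() times out on.
done = threading.Event()
task_queue.put(done.set)

assert not done.wait(timeout=0.1)  # times out while the queue is blocked
release.set()                      # unpark the worker
assert done.wait(timeout=1)        # the queued task now runs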

View File

@@ -570,7 +570,7 @@ async def test_write_lock_db(hass, tmp_path):

     instance = hass.data[DATA_INSTANCE]

-    with util.write_lock_db(instance):
+    with util.write_lock_db_sqlite(instance):
         # Database should be locked now, try writing SQL command
         with instance.engine.connect() as connection:
             with pytest.raises(OperationalError):