Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-07-16 13:46:31 +00:00
Harmonize folder and add-on backup error handling (#5885)
* Harmonize folder and add-on backup error handling

Align add-on and folder backup error handling so that in both cases errors are recorded on the respective backup jobs but not raised to the caller. This allows the backup to complete successfully even if some add-ons or folders fail to back up. Along with this, also record errors in the per-add-on and per-folder backup jobs, as well as in the add-on and folder root jobs. Finally, align the exception handling so that only expected exceptions are caught for add-ons too.

* Fix pytest
This commit is contained in:
parent cbf4b4e27e
commit d0d11db7b1
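Before the diff, a minimal sketch of the pattern the commit message describes (illustrative only, not code from this commit): a per-item backup step raises a specific error, and the caller wraps it with context and records it on the current job via capture_error() instead of re-raising, so one failing add-on or folder no longer aborts the whole backup. The Job class, store_items(), and save() below are hypothetical stand-ins for Supervisor's job machinery.

import asyncio


class BackupError(Exception):
    """Raised when a single backup step fails."""


class Job:
    """Hypothetical stand-in for a Supervisor job that collects errors."""

    def __init__(self) -> None:
        self.errors: list[BackupError] = []

    def capture_error(self, err: BackupError) -> None:
        self.errors.append(err)


async def store_items(job: Job, items: list[str], save) -> None:
    """Back up items sequentially; record failures on the job instead of raising."""
    for item in items:
        try:
            await save(item)
        except BackupError as err:
            # Wrap the error with context and continue with the remaining items.
            job.capture_error(BackupError(f"Can't backup {item}: {err}"))


async def _demo() -> None:
    async def save(item: str) -> None:
        if item == "ssl":
            raise BackupError("disk full")

    job = Job()
    await store_items(job, ["media", "ssl", "share"], save)
    print([str(e) for e in job.errors])  # ["Can't backup ssl: disk full"]


if __name__ == "__main__":
    asyncio.run(_demo())

In the actual change below, this shape is applied in Backup.store_addons and Backup.store_folders: _addon_save and _folder_save still raise BackupError (which lands on their per-item child jobs), and the callers wrap that error with the add-on or folder name and capture it on the parent job rather than re-raising.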
@@ -1359,9 +1359,7 @@ class Addon(AddonModel):
             )
             _LOGGER.info("Finish backup for addon %s", self.slug)
         except (tarfile.TarError, OSError, AddFileError) as err:
-            raise AddonsError(
-                f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
-            ) from err
+            raise AddonsError(f"Can't write tarfile: {err}", _LOGGER.error) from err
         finally:
             if was_running:
                 wait_for_start = await self.end_backup()
@@ -603,9 +603,7 @@ class Backup(JobGroup):
         try:
             start_task = await addon.backup(addon_file)
         except AddonsError as err:
-            raise BackupError(
-                f"Can't create backup for {addon.slug}", _LOGGER.error
-            ) from err
+            raise BackupError(str(err)) from err

         # Store to config
         self._data[ATTR_ADDONS].append(
@@ -634,8 +632,11 @@
             try:
                 if start_task := await self._addon_save(addon):
                     start_tasks.append(start_task)
-            except Exception as err:  # pylint: disable=broad-except
-                _LOGGER.warning("Can't save Add-on %s: %s", addon.slug, err)
+            except BackupError as err:
+                err = BackupError(
+                    f"Can't backup add-on {addon.slug}: {str(err)}", _LOGGER.error
+                )
+                self.sys_jobs.current.capture_error(err)

         return start_tasks

@@ -764,16 +765,20 @@
             if await self.sys_run_in_executor(_save):
                 self._data[ATTR_FOLDERS].append(name)
         except (tarfile.TarError, OSError, AddFileError) as err:
-            raise BackupError(
-                f"Can't backup folder {name}: {str(err)}", _LOGGER.error
-            ) from err
+            raise BackupError(f"Can't write tarfile: {str(err)}") from err

     @Job(name="backup_store_folders", cleanup=False)
     async def store_folders(self, folder_list: list[str]):
         """Backup Supervisor data into backup."""
         # Save folder sequential avoid issue on slow IO
         for folder in folder_list:
-            await self._folder_save(folder)
+            try:
+                await self._folder_save(folder)
+            except BackupError as err:
+                err = BackupError(
+                    f"Can't backup folder {folder}: {str(err)}", _LOGGER.error
+                )
+                self.sys_jobs.current.capture_error(err)

     @Job(name="backup_folder_restore", cleanup=False)
     async def _folder_restore(self, name: str) -> None:
@@ -372,7 +372,9 @@ async def test_api_backup_errors(

     assert coresys.jobs.jobs == []

-    with patch.object(Addon, "backup", side_effect=AddonsError("Backup error")):
+    with patch.object(
+        Addon, "backup", side_effect=(err := AddonsError("Backup error"))
+    ):
         resp = await api_client.post(
             f"/backups/new/{backup_type}",
             json={"name": f"{backup_type} backup"} | options,
@@ -397,7 +399,7 @@
     assert job["child_jobs"][1]["child_jobs"][0]["errors"] == [
         {
             "type": "BackupError",
-            "message": "Can't create backup for local_ssh",
+            "message": str(err),
             "stage": None,
         }
     ]
@@ -8,16 +8,20 @@ import tarfile
 from unittest.mock import MagicMock, patch

 import pytest
+from securetar import AddFileError

 from supervisor.addons.addon import Addon
 from supervisor.backups.backup import Backup, BackupLocation
 from supervisor.backups.const import BackupType
 from supervisor.coresys import CoreSys
 from supervisor.exceptions import (
+    AddonsError,
     BackupFileExistError,
     BackupFileNotFoundError,
     BackupInvalidError,
     BackupPermissionError,
 )
+from supervisor.jobs import JobSchedulerOptions

 from tests.common import get_fixture_path
@@ -68,6 +72,70 @@ async def test_new_backup_exists_error(coresys: CoreSys, tmp_path: Path):
             pass


+async def test_backup_error_addon(
+    coresys: CoreSys, install_addon_ssh: Addon, tmp_path: Path
+):
+    """Test that errors during add-on backup are correctly recorded in jobs."""
+    backup_file = tmp_path / "my_backup.tar"
+    backup = Backup(coresys, backup_file, "test", None)
+    backup.new("test", "2023-07-21T21:05:00.000000+00:00", BackupType.FULL)
+
+    install_addon_ssh.backup = MagicMock(
+        side_effect=(err := AddonsError("Fake add-on backup error"))
+    )
+
+    async with backup.create():
+        # Validate that the add-on exception is collected in the main job
+        backup_store_addons_job, backup_task = coresys.jobs.schedule_job(
+            backup.store_addons, JobSchedulerOptions(), [install_addon_ssh]
+        )
+        await backup_task
+        assert len(backup_store_addons_job.errors) == 1
+        assert str(err) in backup_store_addons_job.errors[0].message
+
+        # Check that the backup_addon_save child job has the same error
+        child_jobs = [
+            job
+            for job in coresys.jobs.jobs
+            if job.parent_id == backup_store_addons_job.uuid
+        ]
+        assert len(child_jobs) == 1
+        assert child_jobs[0].errors[0].message == str(err)
+
+
+async def test_backup_error_folder(
+    coresys: CoreSys, tmp_supervisor_data: Path, tmp_path: Path
+):
+    """Test that errors during folder backup are correctly recorded in jobs."""
+    backup_file = tmp_path / "my_backup.tar"
+    backup = Backup(coresys, backup_file, "test", None)
+    backup.new("test", "2023-07-21T21:05:00.000000+00:00", BackupType.FULL)
+
+    async with backup.create():
+        # Validate that the folder exception is collected in the main job
+        with patch(
+            "supervisor.backups.backup.atomic_contents_add",
+            MagicMock(
+                side_effect=(err := AddFileError(".", "Fake folder backup error"))
+            ),
+        ):
+            backup_store_folders, backup_task = coresys.jobs.schedule_job(
+                backup.store_folders, JobSchedulerOptions(), ["media"]
+            )
+            await backup_task
+            assert len(backup_store_folders.errors) == 1
+            assert str(err) in backup_store_folders.errors[0].message
+
+            # Check that the backup_folder_save child job has the same error
+            child_jobs = [
+                job
+                for job in coresys.jobs.jobs
+                if job.parent_id == backup_store_folders.uuid
+            ]
+            assert len(child_jobs) == 1
+            assert str(err) in child_jobs[0].errors[0].message
+
+
 async def test_consolidate_conflict_varied_encryption(
     coresys: CoreSys, tmp_path: Path, caplog: pytest.LogCaptureFixture
 ):
@@ -434,34 +434,6 @@ async def test_backup_error_homeassistant(
     backup_instance.tarfile.unlink.assert_called_once()


-async def test_backup_error_folder(
-    coresys: CoreSys,
-    backup_mock: MagicMock,
-    install_addon_ssh: Addon,
-    capture_exception: Mock,
-):
-    """Test error collected and file deleted when folder backup fails."""
-    await coresys.core.set_state(CoreState.RUNNING)
-    coresys.hardware.disk.get_disk_free_space = lambda x: 5000
-
-    backup_instance = backup_mock.return_value
-
-    backup_instance.store_folders.side_effect = (
-        err := BackupError("Error while storing folders")
-    )
-
-    job, backup_task = coresys.jobs.schedule_job(
-        coresys.backups.do_backup_full, JobSchedulerOptions()
-    )
-    assert await backup_task is None
-
-    assert job.errors[0].type_ is type(err)
-    assert job.errors[0].message == str(err)
-    assert job.errors[0].stage == BackupJobStage.FOLDERS
-
-    backup_instance.tarfile.unlink.assert_called_once()
-
-
 async def test_backup_error_capture(
     coresys: CoreSys,
     backup_mock: MagicMock,