Mirror of https://github.com/home-assistant/core.git
Delete old addon update backups when updating addon (#136977)

* Delete old addon update backups when updating addon
* Address review comments
* Add tests
parent c77bca1e44
commit 26ae498974
@@ -2,8 +2,6 @@
 
 from __future__ import annotations
 
-import asyncio
-from collections.abc import Callable
 from dataclasses import dataclass, field, replace
 import datetime as dt
 from datetime import datetime, timedelta
@@ -252,7 +250,7 @@ class RetentionConfig:
            """Delete backups older than days."""
            self._schedule_next(manager)
 
-            def _backups_filter(
+            def _delete_filter(
                backups: dict[str, ManagerBackup],
            ) -> dict[str, ManagerBackup]:
                """Return backups older than days to delete."""
@@ -269,7 +267,9 @@ class RetentionConfig:
                    < now
                }
 
-            await _delete_filtered_backups(manager, _backups_filter)
+            await manager.async_delete_filtered_backups(
+                include_filter=_automatic_backups_filter, delete_filter=_delete_filter
+            )
 
        manager.remove_next_delete_event = async_call_later(
            manager.hass, timedelta(days=1), _delete_backups
@@ -521,74 +521,21 @@ class CreateBackupParametersDict(TypedDict, total=False):
    password: str | None
 
 
-async def _delete_filtered_backups(
-    manager: BackupManager,
-    backup_filter: Callable[[dict[str, ManagerBackup]], dict[str, ManagerBackup]],
-) -> None:
-    """Delete backups parsed with a filter.
-
-    :param manager: The backup manager.
-    :param backup_filter: A filter that should return the backups to delete.
-    """
-    backups, get_agent_errors = await manager.async_get_backups()
-    if get_agent_errors:
-        LOGGER.debug(
-            "Error getting backups; continuing anyway: %s",
-            get_agent_errors,
-        )
-
-    # only delete backups that are created with the saved automatic settings
-    backups = {
+def _automatic_backups_filter(
+    backups: dict[str, ManagerBackup],
+) -> dict[str, ManagerBackup]:
+    """Return automatic backups."""
+    return {
        backup_id: backup
        for backup_id, backup in backups.items()
        if backup.with_automatic_settings
    }
 
-    LOGGER.debug("Total automatic backups: %s", backups)
-
-    filtered_backups = backup_filter(backups)
-
-    if not filtered_backups:
-        return
-
-    # always delete oldest backup first
-    filtered_backups = dict(
-        sorted(
-            filtered_backups.items(),
-            key=lambda backup_item: backup_item[1].date,
-        )
-    )
-
-    if len(filtered_backups) >= len(backups):
-        # Never delete the last backup.
-        last_backup = filtered_backups.popitem()
-        LOGGER.debug("Keeping the last backup: %s", last_backup)
-
-    LOGGER.debug("Backups to delete: %s", filtered_backups)
-
-    if not filtered_backups:
-        return
-
-    backup_ids = list(filtered_backups)
-    delete_results = await asyncio.gather(
-        *(manager.async_delete_backup(backup_id) for backup_id in filtered_backups)
-    )
-    agent_errors = {
-        backup_id: error
-        for backup_id, error in zip(backup_ids, delete_results, strict=True)
-        if error
-    }
-    if agent_errors:
-        LOGGER.error(
-            "Error deleting old copies: %s",
-            agent_errors,
-        )
-
 
 async def delete_backups_exceeding_configured_count(manager: BackupManager) -> None:
    """Delete backups exceeding the configured retention count."""
 
-    def _backups_filter(
+    def _delete_filter(
        backups: dict[str, ManagerBackup],
    ) -> dict[str, ManagerBackup]:
        """Return oldest backups more numerous than copies to delete."""
@@ -603,4 +550,6 @@ async def delete_backups_exceeding_configured_count(manager: BackupManager) -> None:
            )[: max(len(backups) - manager.config.data.retention.copies, 0)]
        )
 
-    await _delete_filtered_backups(manager, _backups_filter)
+    await manager.async_delete_filtered_backups(
+        include_filter=_automatic_backups_filter, delete_filter=_delete_filter
+    )
@@ -685,6 +685,70 @@ class BackupManager:
 
        return agent_errors
 
+    async def async_delete_filtered_backups(
+        self,
+        *,
+        include_filter: Callable[[dict[str, ManagerBackup]], dict[str, ManagerBackup]],
+        delete_filter: Callable[[dict[str, ManagerBackup]], dict[str, ManagerBackup]],
+    ) -> None:
+        """Delete backups parsed with a filter.
+
+        :param include_filter: A filter that should return the backups to consider for
+        deletion. Note: The newest of the backups returned by include_filter will
+        unconditionally be kept, even if delete_filter returns all backups.
+        :param delete_filter: A filter that should return the backups to delete.
+        """
+        backups, get_agent_errors = await self.async_get_backups()
+        if get_agent_errors:
+            LOGGER.debug(
+                "Error getting backups; continuing anyway: %s",
+                get_agent_errors,
+            )
+
+        # Run the include filter first to ensure we only consider backups that
+        # should be included in the deletion process.
+        backups = include_filter(backups)
+
+        LOGGER.debug("Total automatic backups: %s", backups)
+
+        backups_to_delete = delete_filter(backups)
+
+        if not backups_to_delete:
+            return
+
+        # always delete oldest backup first
+        backups_to_delete = dict(
+            sorted(
+                backups_to_delete.items(),
+                key=lambda backup_item: backup_item[1].date,
+            )
+        )
+
+        if len(backups_to_delete) >= len(backups):
+            # Never delete the last backup.
+            last_backup = backups_to_delete.popitem()
+            LOGGER.debug("Keeping the last backup: %s", last_backup)
+
+        LOGGER.debug("Backups to delete: %s", backups_to_delete)
+
+        if not backups_to_delete:
+            return
+
+        backup_ids = list(backups_to_delete)
+        delete_results = await asyncio.gather(
+            *(self.async_delete_backup(backup_id) for backup_id in backups_to_delete)
+        )
+        agent_errors = {
+            backup_id: error
+            for backup_id, error in zip(backup_ids, delete_results, strict=True)
+            if error
+        }
+        if agent_errors:
+            LOGGER.error(
+                "Error deleting old copies: %s",
+                agent_errors,
+            )
+
    async def async_receive_backup(
        self,
        *,
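The new BackupManager.async_delete_filtered_backups method above centralizes the pruning rules that previously lived in the module-level helper: the include filter narrows the candidate set, the delete filter picks what to remove, candidates are deleted oldest first, and the newest included backup is always kept when the delete filter would otherwise remove everything. The following standalone sketch mirrors that selection logic with plain Python objects so it can run outside Home Assistant; FakeBackup and select_backups_to_delete are illustrative stand-ins, not part of this commit or of the Home Assistant API.

from collections.abc import Callable
from dataclasses import dataclass, field


@dataclass
class FakeBackup:
    """Illustrative stand-in for ManagerBackup; only the fields used here."""

    date: str  # ISO 8601 timestamp, lexicographically sortable
    with_automatic_settings: bool = False
    extra_metadata: dict[str, str] = field(default_factory=dict)


Filter = Callable[[dict[str, FakeBackup]], dict[str, FakeBackup]]


def select_backups_to_delete(
    backups: dict[str, FakeBackup],
    include_filter: Filter,
    delete_filter: Filter,
) -> list[str]:
    """Mirror the selection logic of async_delete_filtered_backups."""
    # Only backups returned by the include filter are candidates.
    candidates = include_filter(backups)
    to_delete = delete_filter(candidates)
    if not to_delete:
        return []
    # Delete oldest first: sort ascending by date.
    to_delete = dict(sorted(to_delete.items(), key=lambda item: item[1].date))
    if len(to_delete) >= len(candidates):
        # Never delete the last (newest) included backup.
        to_delete.popitem()
    return list(to_delete)


if __name__ == "__main__":
    demo = {
        "a": FakeBackup(date="2024-11-10T04:45:00+01:00", with_automatic_settings=True),
        "b": FakeBackup(date="2024-11-11T04:45:00+01:00", with_automatic_settings=True),
        "c": FakeBackup(date="2024-11-12T04:45:00+01:00"),
    }

    def automatic(backups: dict[str, FakeBackup]) -> dict[str, FakeBackup]:
        return {k: v for k, v in backups.items() if v.with_automatic_settings}

    # Asking to delete every automatic backup still keeps the newest one ("b").
    print(select_backups_to_delete(demo, automatic, lambda b: b))  # ['a']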
@@ -33,6 +33,7 @@ from homeassistant.components.backup import (
    Folder,
    IdleEvent,
    IncorrectPasswordError,
+    ManagerBackup,
    NewBackup,
    RestoreBackupEvent,
    RestoreBackupState,
@@ -51,6 +52,8 @@ LOCATION_CLOUD_BACKUP = ".cloud_backup"
 LOCATION_LOCAL = ".local"
 MOUNT_JOBS = ("mount_manager_create_mount", "mount_manager_remove_mount")
 RESTORE_JOB_ID_ENV = "SUPERVISOR_RESTORE_JOB_ID"
+# Set on backups automatically created when updating an addon
+TAG_ADDON_UPDATE = "supervisor.addon_update"
 _LOGGER = logging.getLogger(__name__)
 
 
@@ -614,10 +617,20 @@ async def backup_addon_before_update(
    else:
        password = None
 
+    def addon_update_backup_filter(
+        backups: dict[str, ManagerBackup],
+    ) -> dict[str, ManagerBackup]:
+        """Return addon update backups."""
+        return {
+            backup_id: backup
+            for backup_id, backup in backups.items()
+            if backup.extra_metadata.get(TAG_ADDON_UPDATE) == addon
+        }
+
    try:
        await backup_manager.async_create_backup(
            agent_ids=[await _default_agent(client)],
-            extra_metadata={"supervisor.addon_update": addon},
+            extra_metadata={TAG_ADDON_UPDATE: addon},
            include_addons=[addon],
            include_all_addons=False,
            include_database=False,
@@ -628,6 +641,14 @@ async def backup_addon_before_update(
        )
    except BackupManagerError as err:
        raise HomeAssistantError(f"Error creating backup: {err}") from err
+    else:
+        try:
+            await backup_manager.async_delete_filtered_backups(
+                include_filter=addon_update_backup_filter,
+                delete_filter=lambda backups: backups,
+            )
+        except BackupManagerError as err:
+            raise HomeAssistantError(f"Error deleting old backups: {err}") from err
 
 
 async def backup_core_before_update(hass: HomeAssistant) -> None:
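To make the hassio change above concrete: every backup created before an add-on update is tagged with extra_metadata {"supervisor.addon_update": <addon slug>}, and once the new backup has been created successfully the else branch asks the manager to delete all backups carrying that tag with an identity delete filter, so the keep-the-newest rule leaves only the backup that was just made. Below is a minimal sketch of that tag filter, continuing the illustrative FakeBackup and select_backups_to_delete definitions from the previous snippet; the explicit addon parameter is an adaptation for the example, whereas the real addon_update_backup_filter is a closure over the addon slug.

def addon_update_backup_filter_for(
    backups: dict[str, FakeBackup], addon: str
) -> dict[str, FakeBackup]:
    """Return only backups tagged as update backups for this add-on."""
    return {
        backup_id: backup
        for backup_id, backup in backups.items()
        if backup.extra_metadata.get("supervisor.addon_update") == addon
    }


pre_update_backups = {
    "old": FakeBackup(
        date="2024-11-11T04:45:00+01:00",
        extra_metadata={"supervisor.addon_update": "test"},
    ),
    "new": FakeBackup(
        date="2024-11-12T04:45:00+01:00",
        extra_metadata={"supervisor.addon_update": "test"},
    ),
    "unrelated": FakeBackup(date="2024-11-12T04:45:00+01:00"),
}

# Every backup tagged for "test" is a candidate, but the newest one survives.
print(
    select_backups_to_delete(
        pre_update_backups,
        lambda b: addon_update_backup_filter_for(b, "test"),
        lambda b: b,  # identity delete filter, as in backup_addon_before_update
    )
)  # ['old']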
@@ -3,13 +3,13 @@
 from datetime import timedelta
 import os
 from typing import Any
-from unittest.mock import AsyncMock, patch
+from unittest.mock import AsyncMock, MagicMock, patch
 
 from aiohasupervisor import SupervisorBadRequestError, SupervisorError
 from aiohasupervisor.models import HomeAssistantUpdateOptions, StoreAddonUpdate
 import pytest
 
-from homeassistant.components.backup import BackupManagerError
+from homeassistant.components.backup import BackupManagerError, ManagerBackup
 from homeassistant.components.hassio import DOMAIN
 from homeassistant.components.hassio.const import REQUEST_REFRESH_DELAY
 from homeassistant.const import __version__ as HAVERSION
@@ -338,6 +338,113 @@ async def test_update_addon_with_backup(
    update_addon.assert_called_once_with("test", StoreAddonUpdate(backup=False))
 
 
+@pytest.mark.parametrize(
+    ("backups", "removed_backups"),
+    [
+        (
+            {},
+            [],
+        ),
+        (
+            {
+                "backup-1": MagicMock(
+                    date="2024-11-10T04:45:00+01:00",
+                    with_automatic_settings=True,
+                    spec=ManagerBackup,
+                ),
+                "backup-2": MagicMock(
+                    date="2024-11-11T04:45:00+01:00",
+                    with_automatic_settings=False,
+                    spec=ManagerBackup,
+                ),
+                "backup-3": MagicMock(
+                    date="2024-11-11T04:45:00+01:00",
+                    extra_metadata={"supervisor.addon_update": "other"},
+                    with_automatic_settings=True,
+                    spec=ManagerBackup,
+                ),
+                "backup-4": MagicMock(
+                    date="2024-11-11T04:45:00+01:00",
+                    extra_metadata={"supervisor.addon_update": "other"},
+                    with_automatic_settings=True,
+                    spec=ManagerBackup,
+                ),
+                "backup-5": MagicMock(
+                    date="2024-11-11T04:45:00+01:00",
+                    extra_metadata={"supervisor.addon_update": "test"},
+                    with_automatic_settings=True,
+                    spec=ManagerBackup,
+                ),
+                "backup-6": MagicMock(
+                    date="2024-11-12T04:45:00+01:00",
+                    extra_metadata={"supervisor.addon_update": "test"},
+                    with_automatic_settings=True,
+                    spec=ManagerBackup,
+                ),
+            },
+            ["backup-5"],
+        ),
+    ],
+)
+async def test_update_addon_with_backup_removes_old_backups(
+    hass: HomeAssistant,
+    supervisor_client: AsyncMock,
+    update_addon: AsyncMock,
+    backups: dict[str, ManagerBackup],
+    removed_backups: list[str],
+) -> None:
+    """Test updating addon update entity."""
+    config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
+    config_entry.add_to_hass(hass)
+
+    with patch.dict(os.environ, MOCK_ENVIRON):
+        result = await async_setup_component(
+            hass,
+            "hassio",
+            {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
+        )
+        assert result
+    assert await async_setup_component(hass, "backup", {})
+    await hass.async_block_till_done()
+
+    supervisor_client.mounts.info.return_value.default_backup_mount = None
+    with (
+        patch(
+            "homeassistant.components.backup.manager.BackupManager.async_create_backup",
+        ) as mock_create_backup,
+        patch(
+            "homeassistant.components.backup.manager.BackupManager.async_delete_backup",
+            autospec=True,
+            return_value={},
+        ) as async_delete_backup,
+        patch(
+            "homeassistant.components.backup.manager.BackupManager.async_get_backups",
+            return_value=(backups, {}),
+        ),
+    ):
+        await hass.services.async_call(
+            "update",
+            "install",
+            {"entity_id": "update.test_update", "backup": True},
+            blocking=True,
+        )
+    mock_create_backup.assert_called_once_with(
+        agent_ids=["hassio.local"],
+        extra_metadata={"supervisor.addon_update": "test"},
+        include_addons=["test"],
+        include_all_addons=False,
+        include_database=False,
+        include_folders=None,
+        include_homeassistant=False,
+        name="test 2.0.0",
+        password=None,
+    )
+    assert len(async_delete_backup.mock_calls) == len(removed_backups)
+    for call in async_delete_backup.mock_calls:
+        assert call.args[1] in removed_backups
+    update_addon.assert_called_once_with("test", StoreAddonUpdate(backup=False))
+
+
 async def test_update_os(hass: HomeAssistant, supervisor_client: AsyncMock) -> None:
    """Test updating OS update entity."""
    config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
@@ -550,9 +657,19 @@ async def test_update_addon_with_error(
    )
 
 
+@pytest.mark.parametrize(
+    ("create_backup_error", "delete_filtered_backups_error", "message"),
+    [
+        (BackupManagerError, None, r"^Error creating backup: "),
+        (None, BackupManagerError, r"^Error deleting old backups: "),
+    ],
+)
 async def test_update_addon_with_backup_and_error(
    hass: HomeAssistant,
    supervisor_client: AsyncMock,
+    create_backup_error: Exception | None,
+    delete_filtered_backups_error: Exception | None,
+    message: str,
 ) -> None:
    """Test updating addon update entity with error."""
    config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
@@ -573,9 +690,13 @@ async def test_update_addon_with_backup_and_error(
    with (
        patch(
            "homeassistant.components.backup.manager.BackupManager.async_create_backup",
-            side_effect=BackupManagerError,
+            side_effect=create_backup_error,
        ),
-        pytest.raises(HomeAssistantError, match=r"^Error creating backup:"),
+        patch(
+            "homeassistant.components.backup.manager.BackupManager.async_delete_filtered_backups",
+            side_effect=delete_filtered_backups_error,
+        ),
+        pytest.raises(HomeAssistantError, match=message),
    ):
        assert not await hass.services.async_call(
            "update",
@@ -2,13 +2,13 @@
 
 import os
 from typing import Any
-from unittest.mock import AsyncMock, patch
+from unittest.mock import AsyncMock, MagicMock, patch
 
 from aiohasupervisor import SupervisorError
 from aiohasupervisor.models import HomeAssistantUpdateOptions, StoreAddonUpdate
 import pytest
 
-from homeassistant.components.backup import BackupManagerError
+from homeassistant.components.backup import BackupManagerError, ManagerBackup
 from homeassistant.components.hassio import DOMAIN
 from homeassistant.components.hassio.const import (
    ATTR_DATA,
@@ -457,6 +457,114 @@ async def test_update_addon_with_backup(
    update_addon.assert_called_once_with("test", StoreAddonUpdate(backup=False))
 
 
+@pytest.mark.parametrize(
+    ("backups", "removed_backups"),
+    [
+        (
+            {},
+            [],
+        ),
+        (
+            {
+                "backup-1": MagicMock(
+                    date="2024-11-10T04:45:00+01:00",
+                    with_automatic_settings=True,
+                    spec=ManagerBackup,
+                ),
+                "backup-2": MagicMock(
+                    date="2024-11-11T04:45:00+01:00",
+                    with_automatic_settings=False,
+                    spec=ManagerBackup,
+                ),
+                "backup-3": MagicMock(
+                    date="2024-11-11T04:45:00+01:00",
+                    extra_metadata={"supervisor.addon_update": "other"},
+                    with_automatic_settings=True,
+                    spec=ManagerBackup,
+                ),
+                "backup-4": MagicMock(
+                    date="2024-11-11T04:45:00+01:00",
+                    extra_metadata={"supervisor.addon_update": "other"},
+                    with_automatic_settings=True,
+                    spec=ManagerBackup,
+                ),
+                "backup-5": MagicMock(
+                    date="2024-11-11T04:45:00+01:00",
+                    extra_metadata={"supervisor.addon_update": "test"},
+                    with_automatic_settings=True,
+                    spec=ManagerBackup,
+                ),
+                "backup-6": MagicMock(
+                    date="2024-11-12T04:45:00+01:00",
+                    extra_metadata={"supervisor.addon_update": "test"},
+                    with_automatic_settings=True,
+                    spec=ManagerBackup,
+                ),
+            },
+            ["backup-5"],
+        ),
+    ],
+)
+async def test_update_addon_with_backup_removes_old_backups(
+    hass: HomeAssistant,
+    hass_ws_client: WebSocketGenerator,
+    supervisor_client: AsyncMock,
+    update_addon: AsyncMock,
+    backups: dict[str, ManagerBackup],
+    removed_backups: list[str],
+) -> None:
+    """Test updating addon update entity."""
+    config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
+    config_entry.add_to_hass(hass)
+
+    with patch.dict(os.environ, MOCK_ENVIRON):
+        result = await async_setup_component(
+            hass,
+            "hassio",
+            {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
+        )
+        assert result
+    assert await async_setup_component(hass, "backup", {})
+    await hass.async_block_till_done()
+
+    client = await hass_ws_client(hass)
+    supervisor_client.mounts.info.return_value.default_backup_mount = None
+    with (
+        patch(
+            "homeassistant.components.backup.manager.BackupManager.async_create_backup",
+        ) as mock_create_backup,
+        patch(
+            "homeassistant.components.backup.manager.BackupManager.async_delete_backup",
+            autospec=True,
+            return_value={},
+        ) as async_delete_backup,
+        patch(
+            "homeassistant.components.backup.manager.BackupManager.async_get_backups",
+            return_value=(backups, {}),
+        ),
+    ):
+        await client.send_json_auto_id(
+            {"type": "hassio/update/addon", "addon": "test", "backup": True}
+        )
+        result = await client.receive_json()
+    assert result["success"]
+    mock_create_backup.assert_called_once_with(
+        agent_ids=["hassio.local"],
+        extra_metadata={"supervisor.addon_update": "test"},
+        include_addons=["test"],
+        include_all_addons=False,
+        include_database=False,
+        include_folders=None,
+        include_homeassistant=False,
+        name="test 2.0.0",
+        password=None,
+    )
+    assert len(async_delete_backup.mock_calls) == len(removed_backups)
+    for call in async_delete_backup.mock_calls:
+        assert call.args[1] in removed_backups
+    update_addon.assert_called_once_with("test", StoreAddonUpdate(backup=False))
+
+
 async def test_update_core(
    hass: HomeAssistant,
    hass_ws_client: WebSocketGenerator,
@@ -622,10 +730,20 @@ async def test_update_addon_with_error(
    }
 
 
+@pytest.mark.parametrize(
+    ("create_backup_error", "delete_filtered_backups_error", "message"),
+    [
+        (BackupManagerError, None, "Error creating backup: "),
+        (None, BackupManagerError, "Error deleting old backups: "),
+    ],
+)
 async def test_update_addon_with_backup_and_error(
    hass: HomeAssistant,
    hass_ws_client: WebSocketGenerator,
    supervisor_client: AsyncMock,
+    create_backup_error: Exception | None,
+    delete_filtered_backups_error: Exception | None,
+    message: str,
 ) -> None:
    """Test updating addon with backup and error."""
    client = await hass_ws_client(hass)
@@ -647,7 +765,11 @@ async def test_update_addon_with_backup_and_error(
    with (
        patch(
            "homeassistant.components.backup.manager.BackupManager.async_create_backup",
-            side_effect=BackupManagerError,
+            side_effect=create_backup_error,
+        ),
+        patch(
+            "homeassistant.components.backup.manager.BackupManager.async_delete_filtered_backups",
+            side_effect=delete_filtered_backups_error,
        ),
    ):
        await client.send_json_auto_id(
@@ -655,10 +777,7 @@ async def test_update_addon_with_backup_and_error(
        )
    result = await client.receive_json()
    assert not result["success"]
-    assert result["error"] == {
-        "code": "home_assistant_error",
-        "message": "Error creating backup: ",
-    }
+    assert result["error"] == {"code": "home_assistant_error", "message": message}
 
 
 async def test_update_core_with_error(