Mirror of https://github.com/home-assistant/core.git (synced 2025-07-08 05:47:10 +00:00)

Bump aiohasupervisor to version 0.3.0 (#137437)

This commit is contained in:
parent 9abea5c5bb
commit 3f2e6d102c
@@ -20,6 +20,7 @@ from aiohasupervisor.models import (
     backups as supervisor_backups,
     mounts as supervisor_mounts,
 )
+from aiohasupervisor.models.backups import LOCATION_CLOUD_BACKUP, LOCATION_LOCAL_STORAGE

 from homeassistant.components.backup import (
     DATA_MANAGER,
@@ -56,8 +57,6 @@ from homeassistant.util.enum import try_parse_enum
 from .const import DOMAIN, EVENT_SUPERVISOR_EVENT
 from .handler import get_supervisor_client

-LOCATION_CLOUD_BACKUP = ".cloud_backup"
-LOCATION_LOCAL = ".local"
 MOUNT_JOBS = ("mount_manager_create_mount", "mount_manager_remove_mount")
 RESTORE_JOB_ID_ENV = "SUPERVISOR_RESTORE_JOB_ID"
 # Set on backups automatically created when updating an addon
@@ -72,7 +71,9 @@ async def async_get_backup_agents(
     """Return the hassio backup agents."""
     client = get_supervisor_client(hass)
     mounts = await client.mounts.info()
-    agents: list[BackupAgent] = [SupervisorBackupAgent(hass, "local", None)]
+    agents: list[BackupAgent] = [
+        SupervisorBackupAgent(hass, "local", LOCATION_LOCAL_STORAGE)
+    ]
     for mount in mounts.mounts:
         if mount.usage is not supervisor_mounts.MountUsage.BACKUP:
             continue
@@ -112,7 +113,7 @@ def async_register_backup_agents_listener(


 def _backup_details_to_agent_backup(
-    details: supervisor_backups.BackupComplete, location: str | None
+    details: supervisor_backups.BackupComplete, location: str
 ) -> AgentBackup:
     """Convert a supervisor backup details object to an agent backup."""
     homeassistant_included = details.homeassistant is not None
@ -125,7 +126,6 @@ def _backup_details_to_agent_backup(
|
|||||||
for addon in details.addons
|
for addon in details.addons
|
||||||
]
|
]
|
||||||
extra_metadata = details.extra or {}
|
extra_metadata = details.extra or {}
|
||||||
location = location or LOCATION_LOCAL
|
|
||||||
return AgentBackup(
|
return AgentBackup(
|
||||||
addons=addons,
|
addons=addons,
|
||||||
backup_id=details.slug,
|
backup_id=details.slug,
|
||||||
@ -148,7 +148,7 @@ class SupervisorBackupAgent(BackupAgent):
|
|||||||
|
|
||||||
domain = DOMAIN
|
domain = DOMAIN
|
||||||
|
|
||||||
def __init__(self, hass: HomeAssistant, name: str, location: str | None) -> None:
|
def __init__(self, hass: HomeAssistant, name: str, location: str) -> None:
|
||||||
"""Initialize the backup agent."""
|
"""Initialize the backup agent."""
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self._hass = hass
|
self._hass = hass
|
||||||
@@ -206,7 +206,7 @@ class SupervisorBackupAgent(BackupAgent):
         backup_list = await self._client.backups.list()
         result = []
         for backup in backup_list:
-            if not backup.locations or self.location not in backup.locations:
+            if self.location not in backup.location_attributes:
                 continue
             details = await self._client.backups.backup_info(backup.slug)
             result.append(_backup_details_to_agent_backup(details, self.location))
@@ -222,7 +222,7 @@ class SupervisorBackupAgent(BackupAgent):
             details = await self._client.backups.backup_info(backup_id)
         except SupervisorNotFoundError:
             return None
-        if self.location not in details.locations:
+        if self.location not in details.location_attributes:
             return None
         return _backup_details_to_agent_backup(details, self.location)

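A minimal sketch, outside the diff, of the listing pattern these hunks migrate to: in aiohasupervisor 0.3.0 a backup exposes location_attributes, a mapping keyed by location string, in place of the old locations set that used None for local storage. The client and location parameters below are assumptions for illustration.

# Illustrative sketch only, mirroring the agent listing loop in the hunk above.
# Assumes an aiohasupervisor client and a location string such as LOCATION_LOCAL_STORAGE.
async def list_backups_for_location(client, location: str) -> list:
    result = []
    for backup in await client.backups.list():
        # 0.3.0: membership test against the location_attributes mapping
        # replaces the old check against the `locations` set.
        if location not in backup.location_attributes:
            continue
        details = await client.backups.backup_info(backup.slug)
        result.append(details)
    return result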
@@ -295,8 +295,8 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         # will be handled by async_upload_backup.
         # If the lists are the same length, it does not matter which one we send,
         # we send the encrypted list to have a well defined behavior.
-        encrypted_locations: list[str | None] = []
-        decrypted_locations: list[str | None] = []
+        encrypted_locations: list[str] = []
+        decrypted_locations: list[str] = []
         agents_settings = manager.config.data.agents
         for hassio_agent in hassio_agents:
             if password is not None:
@@ -353,12 +353,12 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             eager_start=False,  # To ensure the task is not started before we return
         )

-        return (NewBackup(backup_job_id=backup.job_id), backup_task)
+        return (NewBackup(backup_job_id=backup.job_id.hex), backup_task)

     async def _async_wait_for_backup(
         self,
         backup: supervisor_backups.NewBackup,
-        locations: list[str | None],
+        locations: list[str],
         *,
         on_progress: Callable[[CreateBackupEvent], None],
         remove_after_upload: bool,
@@ -508,7 +508,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             else None
         )

-        restore_location: str | None
+        restore_location: str
         if manager.backup_agents[agent_id].domain != DOMAIN:
             # Download the backup to the supervisor. Supervisor will clean up the backup
             # two days after the restore is done.
@@ -577,10 +577,11 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         on_progress: Callable[[RestoreBackupEvent | IdleEvent], None],
     ) -> None:
         """Check restore status after core restart."""
-        if not (restore_job_id := os.environ.get(RESTORE_JOB_ID_ENV)):
+        if not (restore_job_str := os.environ.get(RESTORE_JOB_ID_ENV)):
             _LOGGER.debug("No restore job ID found in environment")
             return

+        restore_job_id = UUID(restore_job_str)
         _LOGGER.debug("Found restore job ID %s in environment", restore_job_id)

         sent_event = False
@@ -634,7 +635,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):

     @callback
     def _async_listen_job_events(
-        self, job_id: str, on_event: Callable[[Mapping[str, Any]], None]
+        self, job_id: UUID, on_event: Callable[[Mapping[str, Any]], None]
     ) -> Callable[[], None]:
         """Listen for job events."""

@@ -649,7 +650,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             if (
                 data.get("event") != "job"
                 or not (event_data := data.get("data"))
-                or event_data.get("uuid") != job_id
+                or event_data.get("uuid") != job_id.hex
             ):
                 return
             on_event(event_data)
@@ -660,10 +661,10 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         return unsub

     async def _get_job_state(
-        self, job_id: str, on_event: Callable[[Mapping[str, Any]], None]
+        self, job_id: UUID, on_event: Callable[[Mapping[str, Any]], None]
     ) -> None:
         """Poll a job for its state."""
-        job = await self._client.jobs.get_job(UUID(job_id))
+        job = await self._client.jobs.get_job(job_id)
         _LOGGER.debug("Job state: %s", job)
         on_event(job.to_dict())

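A minimal sketch, outside the diff, of the job-id handling the hassio backup module converges on above: in aiohasupervisor 0.3.0 the Supervisor job ID is a UUID object, so its .hex form is used where a string is needed and the UUID itself is passed straight to jobs.get_job() without wrapping. The client and options parameters below are assumptions for illustration.

# Illustrative sketch only, based on the changes above; assumes `options` is a
# supervisor_backups.PartialBackupOptions instance and `client` an aiohasupervisor client.
from uuid import UUID

async def start_and_poll(client, options) -> None:
    new_backup = await client.backups.partial_backup(options)
    job_id: UUID = new_backup.job_id          # 0.3.0: a UUID, no longer a plain str
    print("backup job:", job_id.hex)          # .hex where a string ID is needed
    job = await client.jobs.get_job(job_id)   # passed through without UUID(...) wrapping
    print("done" if job.done else "running")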
@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/hassio",
   "iot_class": "local_polling",
   "quality_scale": "internal",
-  "requirements": ["aiohasupervisor==0.2.2b6"],
+  "requirements": ["aiohasupervisor==0.3.0"],
   "single_config_entry": true
 }
@@ -3,7 +3,7 @@
 aiodhcpwatcher==1.1.0
 aiodiscover==2.2.2
 aiodns==3.2.0
-aiohasupervisor==0.2.2b6
+aiohasupervisor==0.3.0
 aiohttp-asyncmdnsresolver==0.0.3
 aiohttp-fast-zlib==0.2.2
 aiohttp==3.11.11
@@ -27,7 +27,7 @@ dependencies = [
     # Integrations may depend on hassio integration without listing it to
     # change behavior based on presence of supervisor. Deprecated with #127228
     # Lib can be removed with 2025.11
-    "aiohasupervisor==0.2.2b6",
+    "aiohasupervisor==0.3.0",
     "aiohttp==3.11.11",
     "aiohttp_cors==0.7.0",
     "aiohttp-fast-zlib==0.2.2",
requirements.txt (generated, 2 changes)
@@ -4,7 +4,7 @@

 # Home Assistant Core
 aiodns==3.2.0
-aiohasupervisor==0.2.2b6
+aiohasupervisor==0.3.0
 aiohttp==3.11.11
 aiohttp_cors==0.7.0
 aiohttp-fast-zlib==0.2.2
requirements_all.txt (generated, 2 changes)
@@ -261,7 +261,7 @@ aioguardian==2022.07.0
 aioharmony==0.4.1

 # homeassistant.components.hassio
-aiohasupervisor==0.2.2b6
+aiohasupervisor==0.3.0

 # homeassistant.components.home_connect
 aiohomeconnect==0.12.3
requirements_test_all.txt (generated, 2 changes)
@@ -246,7 +246,7 @@ aioguardian==2022.07.0
 aioharmony==0.4.1

 # homeassistant.components.hassio
-aiohasupervisor==0.2.2b6
+aiohasupervisor==0.3.0

 # homeassistant.components.home_connect
 aiohomeconnect==0.12.3
@@ -26,6 +26,7 @@ from aiohasupervisor.models import (
     jobs as supervisor_jobs,
     mounts as supervisor_mounts,
 )
+from aiohasupervisor.models.backups import LOCATION_CLOUD_BACKUP, LOCATION_LOCAL_STORAGE
 from aiohasupervisor.models.mounts import MountsInfo
 from freezegun.api import FrozenDateTimeFactory
 import pytest
@@ -39,11 +40,7 @@ from homeassistant.components.backup import (
     Folder,
 )
 from homeassistant.components.hassio import DOMAIN
-from homeassistant.components.hassio.backup import (
-    LOCATION_CLOUD_BACKUP,
-    LOCATION_LOCAL,
-    RESTORE_JOB_ID_ENV,
-)
+from homeassistant.components.hassio.backup import RESTORE_JOB_ID_ENV
 from homeassistant.core import HomeAssistant
 from homeassistant.setup import async_setup_component

@@ -60,17 +57,12 @@ TEST_BACKUP = supervisor_backups.Backup(
         homeassistant=True,
     ),
     date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
-    location=None,
     location_attributes={
-        LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
+        LOCATION_LOCAL_STORAGE: supervisor_backups.BackupLocationAttributes(
             protected=False, size_bytes=1048576
         )
     },
-    locations={None},
     name="Test",
-    protected=False,
-    size=1.0,
-    size_bytes=1048576,
     slug="abc123",
     type=supervisor_backups.BackupType.PARTIAL,
 )
@@ -89,14 +81,9 @@ TEST_BACKUP_DETAILS = supervisor_backups.BackupComplete(
     folders=[supervisor_backups.Folder.SHARE],
     homeassistant_exclude_database=False,
     homeassistant="2024.12.0",
-    location=TEST_BACKUP.location,
     location_attributes=TEST_BACKUP.location_attributes,
-    locations=TEST_BACKUP.locations,
     name=TEST_BACKUP.name,
-    protected=TEST_BACKUP.protected,
     repositories=[],
-    size=TEST_BACKUP.size,
-    size_bytes=TEST_BACKUP.size_bytes,
     slug=TEST_BACKUP.slug,
     supervisor_version="2024.11.2",
     type=TEST_BACKUP.type,
@@ -110,17 +97,12 @@ TEST_BACKUP_2 = supervisor_backups.Backup(
         homeassistant=False,
     ),
     date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
-    location=None,
     location_attributes={
-        LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
+        LOCATION_LOCAL_STORAGE: supervisor_backups.BackupLocationAttributes(
             protected=False, size_bytes=1048576
         )
     },
-    locations={None},
     name="Test",
-    protected=False,
-    size=1.0,
-    size_bytes=1048576,
     slug="abc123",
     type=supervisor_backups.BackupType.PARTIAL,
 )
@@ -139,14 +121,9 @@ TEST_BACKUP_DETAILS_2 = supervisor_backups.BackupComplete(
     folders=[supervisor_backups.Folder.SHARE],
     homeassistant_exclude_database=False,
     homeassistant=None,
-    location=TEST_BACKUP_2.location,
     location_attributes=TEST_BACKUP_2.location_attributes,
-    locations=TEST_BACKUP_2.locations,
     name=TEST_BACKUP_2.name,
-    protected=TEST_BACKUP_2.protected,
     repositories=[],
-    size=TEST_BACKUP_2.size,
-    size_bytes=TEST_BACKUP_2.size_bytes,
     slug=TEST_BACKUP_2.slug,
     supervisor_version="2024.11.2",
     type=TEST_BACKUP_2.type,
@@ -160,17 +137,12 @@ TEST_BACKUP_3 = supervisor_backups.Backup(
         homeassistant=True,
     ),
     date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
-    location="share",
     location_attributes={
-        LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
+        LOCATION_LOCAL_STORAGE: supervisor_backups.BackupLocationAttributes(
             protected=False, size_bytes=1048576
         )
     },
-    locations={"share"},
     name="Test",
-    protected=False,
-    size=1.0,
-    size_bytes=1048576,
     slug="abc123",
     type=supervisor_backups.BackupType.PARTIAL,
 )
@@ -189,14 +161,9 @@ TEST_BACKUP_DETAILS_3 = supervisor_backups.BackupComplete(
     folders=[supervisor_backups.Folder.SHARE],
     homeassistant_exclude_database=False,
     homeassistant=None,
-    location=TEST_BACKUP_3.location,
     location_attributes=TEST_BACKUP_3.location_attributes,
-    locations=TEST_BACKUP_3.locations,
     name=TEST_BACKUP_3.name,
-    protected=TEST_BACKUP_3.protected,
     repositories=[],
-    size=TEST_BACKUP_3.size,
-    size_bytes=TEST_BACKUP_3.size_bytes,
     slug=TEST_BACKUP_3.slug,
     supervisor_version="2024.11.2",
     type=TEST_BACKUP_3.type,
@@ -211,17 +178,12 @@ TEST_BACKUP_4 = supervisor_backups.Backup(
         homeassistant=True,
     ),
     date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
-    location=None,
     location_attributes={
-        LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
+        LOCATION_LOCAL_STORAGE: supervisor_backups.BackupLocationAttributes(
             protected=False, size_bytes=1048576
         )
     },
-    locations={None},
     name="Test",
-    protected=False,
-    size=1.0,
-    size_bytes=1048576,
     slug="abc123",
     type=supervisor_backups.BackupType.PARTIAL,
 )
@@ -240,14 +202,9 @@ TEST_BACKUP_DETAILS_4 = supervisor_backups.BackupComplete(
     folders=[supervisor_backups.Folder.SHARE],
     homeassistant_exclude_database=True,
     homeassistant="2024.12.0",
-    location=TEST_BACKUP_4.location,
     location_attributes=TEST_BACKUP_4.location_attributes,
-    locations=TEST_BACKUP_4.locations,
     name=TEST_BACKUP_4.name,
-    protected=TEST_BACKUP_4.protected,
     repositories=[],
-    size=TEST_BACKUP_4.size,
-    size_bytes=TEST_BACKUP_4.size_bytes,
     slug=TEST_BACKUP_4.slug,
     supervisor_version="2024.11.2",
     type=TEST_BACKUP_4.type,
@@ -261,17 +218,12 @@ TEST_BACKUP_5 = supervisor_backups.Backup(
         homeassistant=True,
     ),
     date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
-    location=LOCATION_CLOUD_BACKUP,
     location_attributes={
         LOCATION_CLOUD_BACKUP: supervisor_backups.BackupLocationAttributes(
             protected=False, size_bytes=1048576
         )
     },
-    locations={LOCATION_CLOUD_BACKUP},
     name="Test",
-    protected=False,
-    size=1.0,
-    size_bytes=1048576,
     slug="abc123",
     type=supervisor_backups.BackupType.PARTIAL,
 )
@@ -290,14 +242,9 @@ TEST_BACKUP_DETAILS_5 = supervisor_backups.BackupComplete(
     folders=[supervisor_backups.Folder.SHARE],
     homeassistant_exclude_database=False,
     homeassistant="2024.12.0",
-    location=TEST_BACKUP_5.location,
     location_attributes=TEST_BACKUP_5.location_attributes,
-    locations=TEST_BACKUP_5.locations,
     name=TEST_BACKUP_5.name,
-    protected=TEST_BACKUP_5.protected,
     repositories=[],
-    size=TEST_BACKUP_5.size,
-    size_bytes=TEST_BACKUP_5.size_bytes,
     slug=TEST_BACKUP_5.slug,
     supervisor_version="2024.11.2",
     type=TEST_BACKUP_5.type,
@@ -312,6 +259,7 @@ TEST_JOB_NOT_DONE = supervisor_jobs.Job(
     stage="copy_additional_locations",
     done=False,
     errors=[],
+    created=datetime.fromisoformat("1970-01-01T00:00:00Z"),
     child_jobs=[],
 )
 TEST_JOB_DONE = supervisor_jobs.Job(
@@ -322,6 +270,7 @@ TEST_JOB_DONE = supervisor_jobs.Job(
     stage="copy_additional_locations",
     done=True,
     errors=[],
+    created=datetime.fromisoformat("1970-01-01T00:00:00Z"),
     child_jobs=[],
 )
 TEST_RESTORE_JOB_DONE_WITH_ERROR = supervisor_jobs.Job(
@@ -340,6 +289,7 @@ TEST_RESTORE_JOB_DONE_WITH_ERROR = supervisor_jobs.Job(
            ),
        )
    ],
+    created=datetime.fromisoformat("1970-01-01T00:00:00Z"),
     child_jobs=[],
 )

@@ -580,7 +530,10 @@ async def test_agent_download(
     assert await resp.content.read() == b"backup data"

     supervisor_client.backups.download_backup.assert_called_once_with(
-        "abc123", options=supervisor_backups.DownloadBackupOptions(location=None)
+        "abc123",
+        options=supervisor_backups.DownloadBackupOptions(
+            location=LOCATION_LOCAL_STORAGE
+        ),
     )


@@ -766,7 +719,10 @@ async def test_agent_delete_backup(
     assert response["success"]
     assert response["result"] == {"agent_errors": {}}
     supervisor_client.backups.remove_backup.assert_called_once_with(
-        backup_id, options=supervisor_backups.RemoveBackupOptions(location={None})
+        backup_id,
+        options=supervisor_backups.RemoveBackupOptions(
+            location={LOCATION_LOCAL_STORAGE}
+        ),
     )


@@ -812,7 +768,10 @@ async def test_agent_delete_with_error(

     assert response == {"id": 1, "type": "result"} | expected_response
     supervisor_client.backups.remove_backup.assert_called_once_with(
-        backup_id, options=supervisor_backups.RemoveBackupOptions(location={None})
+        backup_id,
+        options=supervisor_backups.RemoveBackupOptions(
+            location={LOCATION_LOCAL_STORAGE}
+        ),
     )


@@ -891,7 +850,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions(
     folders={"ssl"},
     homeassistant_exclude_database=False,
     homeassistant=True,
-    location=[None],
+    location=[LOCATION_LOCAL_STORAGE],
     name="Test",
     password=None,
 )
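A minimal sketch, outside the diff, of how backup options address the Supervisor's local storage after this bump: LOCATION_LOCAL_STORAGE, imported from aiohasupervisor.models.backups, replaces the former None placeholder, as in DEFAULT_BACKUP_OPTIONS above. Only the fields visible in that hunk are shown; other values are illustrative.

# Illustrative sketch only; mirrors the fields of DEFAULT_BACKUP_OPTIONS above with made-up values.
from aiohasupervisor.models import backups as supervisor_backups
from aiohasupervisor.models.backups import LOCATION_LOCAL_STORAGE

options = supervisor_backups.PartialBackupOptions(
    folders={"ssl"},
    homeassistant_exclude_database=False,
    homeassistant=True,
    location=[LOCATION_LOCAL_STORAGE],  # previously [None] meant "local storage"
    name="Example",
    password=None,
)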
@@ -947,7 +906,7 @@ async def test_reader_writer_create(
     """Test generating a backup."""
     client = await hass_ws_client(hass)
     freezer.move_to("2025-01-30 13:42:12.345678")
-    supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+    supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
     supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
     supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE

@@ -1022,7 +981,7 @@ async def test_reader_writer_create_report_progress(
     """Test generating a backup."""
     client = await hass_ws_client(hass)
     freezer.move_to("2025-01-30 13:42:12.345678")
-    supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+    supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
     supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
     supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE

@@ -1129,7 +1088,7 @@ async def test_reader_writer_create_job_done(
     """Test generating a backup, and backup job finishes early."""
     client = await hass_ws_client(hass)
     freezer.move_to("2025-01-30 13:42:12.345678")
-    supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+    supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
     supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
     supervisor_client.jobs.get_job.return_value = TEST_JOB_DONE

@@ -1198,7 +1157,7 @@ async def test_reader_writer_create_job_done(
             None,
             ["hassio.local", "hassio.share1", "hassio.share2", "hassio.share3"],
             None,
-            [None, "share1", "share2", "share3"],
+            [LOCATION_LOCAL_STORAGE, "share1", "share2", "share3"],
             False,
             [],
         ),
@@ -1207,7 +1166,7 @@ async def test_reader_writer_create_job_done(
             "hunter2",
             ["hassio.local", "hassio.share1", "hassio.share2", "hassio.share3"],
             "hunter2",
-            [None, "share1", "share2", "share3"],
+            [LOCATION_LOCAL_STORAGE, "share1", "share2", "share3"],
             True,
             [],
         ),
@@ -1225,7 +1184,7 @@ async def test_reader_writer_create_job_done(
             "hunter2",
             ["share1", "share2", "share3"],
             True,
-            [None],
+            [LOCATION_LOCAL_STORAGE],
         ),
         (
             [
@@ -1242,7 +1201,7 @@ async def test_reader_writer_create_job_done(
             "hunter2",
             ["share2", "share3"],
             True,
-            [None, "share1"],
+            [LOCATION_LOCAL_STORAGE, "share1"],
         ),
         (
             [
@@ -1258,7 +1217,7 @@ async def test_reader_writer_create_job_done(
             "hunter2",
             ["hassio.local", "hassio.share1", "hassio.share2", "hassio.share3"],
             None,
-            [None, "share1", "share2"],
+            [LOCATION_LOCAL_STORAGE, "share1", "share2"],
             True,
             ["share3"],
         ),
@@ -1274,7 +1233,7 @@ async def test_reader_writer_create_job_done(
             "hunter2",
             ["hassio.local"],
             None,
-            [None],
+            [LOCATION_LOCAL_STORAGE],
             False,
             [],
         ),
@@ -1312,15 +1271,14 @@ async def test_reader_writer_create_per_agent_encryption(
             for i in range(1, 4)
         ],
     )
-    supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+    supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
     supervisor_client.backups.backup_info.return_value = replace(
         TEST_BACKUP_DETAILS,
         extra=DEFAULT_BACKUP_OPTIONS.extra,
-        locations=create_locations,
         location_attributes={
-            location or LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
+            location: supervisor_backups.BackupLocationAttributes(
                 protected=create_protected,
-                size_bytes=TEST_BACKUP_DETAILS.size_bytes,
+                size_bytes=1048576,
             )
             for location in create_locations
         },
@@ -1514,7 +1472,7 @@ async def test_reader_writer_create_missing_reference_error(
 ) -> None:
     """Test missing reference error when generating a backup."""
     client = await hass_ws_client(hass)
-    supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+    supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
     supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE

     await client.send_json_auto_id({"type": "backup/subscribe_events"})
@@ -1581,7 +1539,7 @@ async def test_reader_writer_create_download_remove_error(
 ) -> None:
     """Test download and remove error when generating a backup."""
     client = await hass_ws_client(hass)
-    supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+    supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
     supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_5
     supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
     method_mock = getattr(supervisor_client.backups, method)
@@ -1668,7 +1626,7 @@ async def test_reader_writer_create_info_error(
 ) -> None:
     """Test backup info error when generating a backup."""
     client = await hass_ws_client(hass)
-    supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+    supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
     supervisor_client.backups.backup_info.side_effect = exception
     supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE

@@ -1745,7 +1703,7 @@ async def test_reader_writer_create_remote_backup(
     """Test generating a backup which will be uploaded to a remote agent."""
     client = await hass_ws_client(hass)
     freezer.move_to("2025-01-30 13:42:12.345678")
-    supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+    supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
     supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_5
     supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE

@@ -1848,7 +1806,7 @@ async def test_reader_writer_create_wrong_parameters(
 ) -> None:
     """Test generating a backup."""
     client = await hass_ws_client(hass)
-    supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+    supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
     supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS

     await client.send_json_auto_id({"type": "backup/subscribe_events"})
@@ -1975,7 +1933,7 @@ async def test_reader_writer_restore(
 ) -> None:
     """Test restoring a backup."""
     client = await hass_ws_client(hass)
-    supervisor_client.backups.partial_restore.return_value.job_id = TEST_JOB_ID
+    supervisor_client.backups.partial_restore.return_value.job_id = UUID(TEST_JOB_ID)
     supervisor_client.backups.list.return_value = [TEST_BACKUP]
     supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
     supervisor_client.jobs.get_job.return_value = get_job_result
@@ -2006,7 +1964,7 @@ async def test_reader_writer_restore(
             background=True,
             folders=None,
             homeassistant=True,
-            location=None,
+            location=LOCATION_LOCAL_STORAGE,
             password=None,
         ),
     )
@@ -2040,7 +1998,7 @@ async def test_reader_writer_restore_report_progress(
 ) -> None:
     """Test restoring a backup."""
     client = await hass_ws_client(hass)
-    supervisor_client.backups.partial_restore.return_value.job_id = TEST_JOB_ID
+    supervisor_client.backups.partial_restore.return_value.job_id = UUID(TEST_JOB_ID)
     supervisor_client.backups.list.return_value = [TEST_BACKUP]
     supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
     supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
@@ -2071,7 +2029,7 @@ async def test_reader_writer_restore_report_progress(
             background=True,
             folders=None,
             homeassistant=True,
-            location=None,
+            location=LOCATION_LOCAL_STORAGE,
             password=None,
         ),
     )
@@ -2193,7 +2151,7 @@ async def test_reader_writer_restore_error(
             background=True,
             folders=None,
             homeassistant=True,
-            location=None,
+            location=LOCATION_LOCAL_STORAGE,
             password=None,
         ),
     )
@@ -2221,7 +2179,7 @@ async def test_reader_writer_restore_late_error(
 ) -> None:
     """Test restoring a backup with error."""
     client = await hass_ws_client(hass)
-    supervisor_client.backups.partial_restore.return_value.job_id = TEST_JOB_ID
+    supervisor_client.backups.partial_restore.return_value.job_id = UUID(TEST_JOB_ID)
     supervisor_client.backups.list.return_value = [TEST_BACKUP]
     supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
     supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
@@ -2250,7 +2208,7 @@ async def test_reader_writer_restore_late_error(
             background=True,
             folders=None,
             homeassistant=True,
-            location=None,
+            location=LOCATION_LOCAL_STORAGE,
             password=None,
         ),
     )