Bump aiohasupervisor to version 0.3.0 (#137437)

This commit is contained in:
Erik Montnemery 2025-02-05 16:58:39 +01:00 committed by Franck Nijhof
parent c4e2ddd28b
commit d6414b9849
No known key found for this signature in database
GPG Key ID: D62583BA8AB11CA3
8 changed files with 71 additions and 112 deletions

View File

@@ -20,6 +20,7 @@ from aiohasupervisor.models import (
backups as supervisor_backups,
mounts as supervisor_mounts,
)
from aiohasupervisor.models.backups import LOCATION_CLOUD_BACKUP, LOCATION_LOCAL_STORAGE
from homeassistant.components.backup import (
DATA_MANAGER,
@@ -56,8 +57,6 @@ from homeassistant.util.enum import try_parse_enum
from .const import DOMAIN, EVENT_SUPERVISOR_EVENT
from .handler import get_supervisor_client
LOCATION_CLOUD_BACKUP = ".cloud_backup"
LOCATION_LOCAL = ".local"
MOUNT_JOBS = ("mount_manager_create_mount", "mount_manager_remove_mount")
RESTORE_JOB_ID_ENV = "SUPERVISOR_RESTORE_JOB_ID"
# Set on backups automatically created when updating an addon
@@ -72,7 +71,9 @@ async def async_get_backup_agents(
"""Return the hassio backup agents."""
client = get_supervisor_client(hass)
mounts = await client.mounts.info()
agents: list[BackupAgent] = [SupervisorBackupAgent(hass, "local", None)]
agents: list[BackupAgent] = [
SupervisorBackupAgent(hass, "local", LOCATION_LOCAL_STORAGE)
]
for mount in mounts.mounts:
if mount.usage is not supervisor_mounts.MountUsage.BACKUP:
continue
@@ -112,7 +113,7 @@ def async_register_backup_agents_listener(
def _backup_details_to_agent_backup(
details: supervisor_backups.BackupComplete, location: str | None
details: supervisor_backups.BackupComplete, location: str
) -> AgentBackup:
"""Convert a supervisor backup details object to an agent backup."""
homeassistant_included = details.homeassistant is not None
@@ -125,7 +126,6 @@ def _backup_details_to_agent_backup(
for addon in details.addons
]
extra_metadata = details.extra or {}
location = location or LOCATION_LOCAL
return AgentBackup(
addons=addons,
backup_id=details.slug,
@@ -148,7 +148,7 @@ class SupervisorBackupAgent(BackupAgent):
domain = DOMAIN
def __init__(self, hass: HomeAssistant, name: str, location: str | None) -> None:
def __init__(self, hass: HomeAssistant, name: str, location: str) -> None:
"""Initialize the backup agent."""
super().__init__()
self._hass = hass
@@ -206,7 +206,7 @@ class SupervisorBackupAgent(BackupAgent):
backup_list = await self._client.backups.list()
result = []
for backup in backup_list:
if not backup.locations or self.location not in backup.locations:
if self.location not in backup.location_attributes:
continue
details = await self._client.backups.backup_info(backup.slug)
result.append(_backup_details_to_agent_backup(details, self.location))
@@ -222,7 +222,7 @@ class SupervisorBackupAgent(BackupAgent):
details = await self._client.backups.backup_info(backup_id)
except SupervisorNotFoundError:
return None
if self.location not in details.locations:
if self.location not in details.location_attributes:
return None
return _backup_details_to_agent_backup(details, self.location)
@@ -295,8 +295,8 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
# will be handled by async_upload_backup.
# If the lists are the same length, it does not matter which one we send,
# we send the encrypted list to have a well defined behavior.
encrypted_locations: list[str | None] = []
decrypted_locations: list[str | None] = []
encrypted_locations: list[str] = []
decrypted_locations: list[str] = []
agents_settings = manager.config.data.agents
for hassio_agent in hassio_agents:
if password is not None:
@@ -353,12 +353,12 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
eager_start=False, # To ensure the task is not started before we return
)
return (NewBackup(backup_job_id=backup.job_id), backup_task)
return (NewBackup(backup_job_id=backup.job_id.hex), backup_task)
async def _async_wait_for_backup(
self,
backup: supervisor_backups.NewBackup,
locations: list[str | None],
locations: list[str],
*,
on_progress: Callable[[CreateBackupEvent], None],
remove_after_upload: bool,
@@ -508,7 +508,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
else None
)
restore_location: str | None
restore_location: str
if manager.backup_agents[agent_id].domain != DOMAIN:
# Download the backup to the supervisor. Supervisor will clean up the backup
# two days after the restore is done.
@@ -577,10 +577,11 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
on_progress: Callable[[RestoreBackupEvent | IdleEvent], None],
) -> None:
"""Check restore status after core restart."""
if not (restore_job_id := os.environ.get(RESTORE_JOB_ID_ENV)):
if not (restore_job_str := os.environ.get(RESTORE_JOB_ID_ENV)):
_LOGGER.debug("No restore job ID found in environment")
return
restore_job_id = UUID(restore_job_str)
_LOGGER.debug("Found restore job ID %s in environment", restore_job_id)
sent_event = False
@@ -634,7 +635,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
@callback
def _async_listen_job_events(
self, job_id: str, on_event: Callable[[Mapping[str, Any]], None]
self, job_id: UUID, on_event: Callable[[Mapping[str, Any]], None]
) -> Callable[[], None]:
"""Listen for job events."""
@@ -649,7 +650,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
if (
data.get("event") != "job"
or not (event_data := data.get("data"))
or event_data.get("uuid") != job_id
or event_data.get("uuid") != job_id.hex
):
return
on_event(event_data)
@@ -660,10 +661,10 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
return unsub
async def _get_job_state(
self, job_id: str, on_event: Callable[[Mapping[str, Any]], None]
self, job_id: UUID, on_event: Callable[[Mapping[str, Any]], None]
) -> None:
"""Poll a job for its state."""
job = await self._client.jobs.get_job(UUID(job_id))
job = await self._client.jobs.get_job(job_id)
_LOGGER.debug("Job state: %s", job)
on_event(job.to_dict())

View File

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/hassio",
"iot_class": "local_polling",
"quality_scale": "internal",
"requirements": ["aiohasupervisor==0.2.2b6"],
"requirements": ["aiohasupervisor==0.3.0"],
"single_config_entry": true
}

View File

@@ -3,7 +3,7 @@
aiodhcpwatcher==1.0.3
aiodiscover==2.1.0
aiodns==3.2.0
aiohasupervisor==0.2.2b6
aiohasupervisor==0.3.0
aiohttp-asyncmdnsresolver==0.0.3
aiohttp-fast-zlib==0.2.0
aiohttp==3.11.11

View File

@@ -27,7 +27,7 @@ dependencies = [
# Integrations may depend on hassio integration without listing it to
# change behavior based on presence of supervisor. Deprecated with #127228
# Lib can be removed with 2025.11
"aiohasupervisor==0.2.2b6",
"aiohasupervisor==0.3.0",
"aiohttp==3.11.11",
"aiohttp_cors==0.7.0",
"aiohttp-fast-zlib==0.2.0",

2
requirements.txt generated
View File

@@ -4,7 +4,7 @@
# Home Assistant Core
aiodns==3.2.0
aiohasupervisor==0.2.2b6
aiohasupervisor==0.3.0
aiohttp==3.11.11
aiohttp_cors==0.7.0
aiohttp-fast-zlib==0.2.0

2
requirements_all.txt generated
View File

@@ -261,7 +261,7 @@ aioguardian==2022.07.0
aioharmony==0.4.1
# homeassistant.components.hassio
aiohasupervisor==0.2.2b6
aiohasupervisor==0.3.0
# homeassistant.components.homekit_controller
aiohomekit==3.2.7

View File

@@ -246,7 +246,7 @@ aioguardian==2022.07.0
aioharmony==0.4.1
# homeassistant.components.hassio
aiohasupervisor==0.2.2b6
aiohasupervisor==0.3.0
# homeassistant.components.homekit_controller
aiohomekit==3.2.7

View File

@@ -26,6 +26,7 @@ from aiohasupervisor.models import (
jobs as supervisor_jobs,
mounts as supervisor_mounts,
)
from aiohasupervisor.models.backups import LOCATION_CLOUD_BACKUP, LOCATION_LOCAL_STORAGE
from aiohasupervisor.models.mounts import MountsInfo
from freezegun.api import FrozenDateTimeFactory
import pytest
@@ -39,11 +40,7 @@ from homeassistant.components.backup import (
Folder,
)
from homeassistant.components.hassio import DOMAIN
from homeassistant.components.hassio.backup import (
LOCATION_CLOUD_BACKUP,
LOCATION_LOCAL,
RESTORE_JOB_ID_ENV,
)
from homeassistant.components.hassio.backup import RESTORE_JOB_ID_ENV
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
@@ -60,17 +57,12 @@ TEST_BACKUP = supervisor_backups.Backup(
homeassistant=True,
),
date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
location=None,
location_attributes={
LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
LOCATION_LOCAL_STORAGE: supervisor_backups.BackupLocationAttributes(
protected=False, size_bytes=1048576
)
},
locations={None},
name="Test",
protected=False,
size=1.0,
size_bytes=1048576,
slug="abc123",
type=supervisor_backups.BackupType.PARTIAL,
)
@@ -89,14 +81,9 @@ TEST_BACKUP_DETAILS = supervisor_backups.BackupComplete(
folders=[supervisor_backups.Folder.SHARE],
homeassistant_exclude_database=False,
homeassistant="2024.12.0",
location=TEST_BACKUP.location,
location_attributes=TEST_BACKUP.location_attributes,
locations=TEST_BACKUP.locations,
name=TEST_BACKUP.name,
protected=TEST_BACKUP.protected,
repositories=[],
size=TEST_BACKUP.size,
size_bytes=TEST_BACKUP.size_bytes,
slug=TEST_BACKUP.slug,
supervisor_version="2024.11.2",
type=TEST_BACKUP.type,
@@ -110,17 +97,12 @@ TEST_BACKUP_2 = supervisor_backups.Backup(
homeassistant=False,
),
date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
location=None,
location_attributes={
LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
LOCATION_LOCAL_STORAGE: supervisor_backups.BackupLocationAttributes(
protected=False, size_bytes=1048576
)
},
locations={None},
name="Test",
protected=False,
size=1.0,
size_bytes=1048576,
slug="abc123",
type=supervisor_backups.BackupType.PARTIAL,
)
@@ -139,14 +121,9 @@ TEST_BACKUP_DETAILS_2 = supervisor_backups.BackupComplete(
folders=[supervisor_backups.Folder.SHARE],
homeassistant_exclude_database=False,
homeassistant=None,
location=TEST_BACKUP_2.location,
location_attributes=TEST_BACKUP_2.location_attributes,
locations=TEST_BACKUP_2.locations,
name=TEST_BACKUP_2.name,
protected=TEST_BACKUP_2.protected,
repositories=[],
size=TEST_BACKUP_2.size,
size_bytes=TEST_BACKUP_2.size_bytes,
slug=TEST_BACKUP_2.slug,
supervisor_version="2024.11.2",
type=TEST_BACKUP_2.type,
@@ -160,17 +137,12 @@ TEST_BACKUP_3 = supervisor_backups.Backup(
homeassistant=True,
),
date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
location="share",
location_attributes={
LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
LOCATION_LOCAL_STORAGE: supervisor_backups.BackupLocationAttributes(
protected=False, size_bytes=1048576
)
},
locations={"share"},
name="Test",
protected=False,
size=1.0,
size_bytes=1048576,
slug="abc123",
type=supervisor_backups.BackupType.PARTIAL,
)
@@ -189,14 +161,9 @@ TEST_BACKUP_DETAILS_3 = supervisor_backups.BackupComplete(
folders=[supervisor_backups.Folder.SHARE],
homeassistant_exclude_database=False,
homeassistant=None,
location=TEST_BACKUP_3.location,
location_attributes=TEST_BACKUP_3.location_attributes,
locations=TEST_BACKUP_3.locations,
name=TEST_BACKUP_3.name,
protected=TEST_BACKUP_3.protected,
repositories=[],
size=TEST_BACKUP_3.size,
size_bytes=TEST_BACKUP_3.size_bytes,
slug=TEST_BACKUP_3.slug,
supervisor_version="2024.11.2",
type=TEST_BACKUP_3.type,
@@ -211,17 +178,12 @@ TEST_BACKUP_4 = supervisor_backups.Backup(
homeassistant=True,
),
date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
location=None,
location_attributes={
LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
LOCATION_LOCAL_STORAGE: supervisor_backups.BackupLocationAttributes(
protected=False, size_bytes=1048576
)
},
locations={None},
name="Test",
protected=False,
size=1.0,
size_bytes=1048576,
slug="abc123",
type=supervisor_backups.BackupType.PARTIAL,
)
@@ -240,14 +202,9 @@ TEST_BACKUP_DETAILS_4 = supervisor_backups.BackupComplete(
folders=[supervisor_backups.Folder.SHARE],
homeassistant_exclude_database=True,
homeassistant="2024.12.0",
location=TEST_BACKUP_4.location,
location_attributes=TEST_BACKUP_4.location_attributes,
locations=TEST_BACKUP_4.locations,
name=TEST_BACKUP_4.name,
protected=TEST_BACKUP_4.protected,
repositories=[],
size=TEST_BACKUP_4.size,
size_bytes=TEST_BACKUP_4.size_bytes,
slug=TEST_BACKUP_4.slug,
supervisor_version="2024.11.2",
type=TEST_BACKUP_4.type,
@@ -261,17 +218,12 @@ TEST_BACKUP_5 = supervisor_backups.Backup(
homeassistant=True,
),
date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
location=LOCATION_CLOUD_BACKUP,
location_attributes={
LOCATION_CLOUD_BACKUP: supervisor_backups.BackupLocationAttributes(
protected=False, size_bytes=1048576
)
},
locations={LOCATION_CLOUD_BACKUP},
name="Test",
protected=False,
size=1.0,
size_bytes=1048576,
slug="abc123",
type=supervisor_backups.BackupType.PARTIAL,
)
@@ -290,14 +242,9 @@ TEST_BACKUP_DETAILS_5 = supervisor_backups.BackupComplete(
folders=[supervisor_backups.Folder.SHARE],
homeassistant_exclude_database=False,
homeassistant="2024.12.0",
location=TEST_BACKUP_5.location,
location_attributes=TEST_BACKUP_5.location_attributes,
locations=TEST_BACKUP_5.locations,
name=TEST_BACKUP_5.name,
protected=TEST_BACKUP_5.protected,
repositories=[],
size=TEST_BACKUP_5.size,
size_bytes=TEST_BACKUP_5.size_bytes,
slug=TEST_BACKUP_5.slug,
supervisor_version="2024.11.2",
type=TEST_BACKUP_5.type,
@@ -312,6 +259,7 @@ TEST_JOB_NOT_DONE = supervisor_jobs.Job(
stage="copy_additional_locations",
done=False,
errors=[],
created=datetime.fromisoformat("1970-01-01T00:00:00Z"),
child_jobs=[],
)
TEST_JOB_DONE = supervisor_jobs.Job(
@@ -322,6 +270,7 @@ TEST_JOB_DONE = supervisor_jobs.Job(
stage="copy_additional_locations",
done=True,
errors=[],
created=datetime.fromisoformat("1970-01-01T00:00:00Z"),
child_jobs=[],
)
TEST_RESTORE_JOB_DONE_WITH_ERROR = supervisor_jobs.Job(
@@ -340,6 +289,7 @@ TEST_RESTORE_JOB_DONE_WITH_ERROR = supervisor_jobs.Job(
),
)
],
created=datetime.fromisoformat("1970-01-01T00:00:00Z"),
child_jobs=[],
)
@@ -580,7 +530,10 @@ async def test_agent_download(
assert await resp.content.read() == b"backup data"
supervisor_client.backups.download_backup.assert_called_once_with(
"abc123", options=supervisor_backups.DownloadBackupOptions(location=None)
"abc123",
options=supervisor_backups.DownloadBackupOptions(
location=LOCATION_LOCAL_STORAGE
),
)
@@ -766,7 +719,10 @@ async def test_agent_delete_backup(
assert response["success"]
assert response["result"] == {"agent_errors": {}}
supervisor_client.backups.remove_backup.assert_called_once_with(
backup_id, options=supervisor_backups.RemoveBackupOptions(location={None})
backup_id,
options=supervisor_backups.RemoveBackupOptions(
location={LOCATION_LOCAL_STORAGE}
),
)
@@ -812,7 +768,10 @@ async def test_agent_delete_with_error(
assert response == {"id": 1, "type": "result"} | expected_response
supervisor_client.backups.remove_backup.assert_called_once_with(
backup_id, options=supervisor_backups.RemoveBackupOptions(location={None})
backup_id,
options=supervisor_backups.RemoveBackupOptions(
location={LOCATION_LOCAL_STORAGE}
),
)
@@ -891,7 +850,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions(
folders={"ssl"},
homeassistant_exclude_database=False,
homeassistant=True,
location=[None],
location=[LOCATION_LOCAL_STORAGE],
name="Test",
password=None,
)
@@ -947,7 +906,7 @@ async def test_reader_writer_create(
"""Test generating a backup."""
client = await hass_ws_client(hass)
freezer.move_to("2025-01-30 13:42:12.345678")
supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
@@ -1022,7 +981,7 @@ async def test_reader_writer_create_report_progress(
"""Test generating a backup."""
client = await hass_ws_client(hass)
freezer.move_to("2025-01-30 13:42:12.345678")
supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
@@ -1129,7 +1088,7 @@ async def test_reader_writer_create_job_done(
"""Test generating a backup, and backup job finishes early."""
client = await hass_ws_client(hass)
freezer.move_to("2025-01-30 13:42:12.345678")
supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
supervisor_client.jobs.get_job.return_value = TEST_JOB_DONE
@@ -1198,7 +1157,7 @@ async def test_reader_writer_create_job_done(
None,
["hassio.local", "hassio.share1", "hassio.share2", "hassio.share3"],
None,
[None, "share1", "share2", "share3"],
[LOCATION_LOCAL_STORAGE, "share1", "share2", "share3"],
False,
[],
),
@@ -1207,7 +1166,7 @@ async def test_reader_writer_create_job_done(
"hunter2",
["hassio.local", "hassio.share1", "hassio.share2", "hassio.share3"],
"hunter2",
[None, "share1", "share2", "share3"],
[LOCATION_LOCAL_STORAGE, "share1", "share2", "share3"],
True,
[],
),
@@ -1225,7 +1184,7 @@ async def test_reader_writer_create_job_done(
"hunter2",
["share1", "share2", "share3"],
True,
[None],
[LOCATION_LOCAL_STORAGE],
),
(
[
@@ -1242,7 +1201,7 @@ async def test_reader_writer_create_job_done(
"hunter2",
["share2", "share3"],
True,
[None, "share1"],
[LOCATION_LOCAL_STORAGE, "share1"],
),
(
[
@@ -1258,7 +1217,7 @@ async def test_reader_writer_create_job_done(
"hunter2",
["hassio.local", "hassio.share1", "hassio.share2", "hassio.share3"],
None,
[None, "share1", "share2"],
[LOCATION_LOCAL_STORAGE, "share1", "share2"],
True,
["share3"],
),
@@ -1274,7 +1233,7 @@ async def test_reader_writer_create_job_done(
"hunter2",
["hassio.local"],
None,
[None],
[LOCATION_LOCAL_STORAGE],
False,
[],
),
@@ -1312,15 +1271,14 @@ async def test_reader_writer_create_per_agent_encryption(
for i in range(1, 4)
],
)
supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.return_value = replace(
TEST_BACKUP_DETAILS,
extra=DEFAULT_BACKUP_OPTIONS.extra,
locations=create_locations,
location_attributes={
location or LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
location: supervisor_backups.BackupLocationAttributes(
protected=create_protected,
size_bytes=TEST_BACKUP_DETAILS.size_bytes,
size_bytes=1048576,
)
for location in create_locations
},
@@ -1514,7 +1472,7 @@ async def test_reader_writer_create_missing_reference_error(
) -> None:
"""Test missing reference error when generating a backup."""
client = await hass_ws_client(hass)
supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
await client.send_json_auto_id({"type": "backup/subscribe_events"})
@@ -1581,7 +1539,7 @@ async def test_reader_writer_create_download_remove_error(
) -> None:
"""Test download and remove error when generating a backup."""
client = await hass_ws_client(hass)
supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_5
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
method_mock = getattr(supervisor_client.backups, method)
@@ -1668,7 +1626,7 @@ async def test_reader_writer_create_info_error(
) -> None:
"""Test backup info error when generating a backup."""
client = await hass_ws_client(hass)
supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.side_effect = exception
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
@@ -1745,7 +1703,7 @@ async def test_reader_writer_create_remote_backup(
"""Test generating a backup which will be uploaded to a remote agent."""
client = await hass_ws_client(hass)
freezer.move_to("2025-01-30 13:42:12.345678")
supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_5
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
@@ -1848,7 +1806,7 @@ async def test_reader_writer_create_wrong_parameters(
) -> None:
"""Test generating a backup."""
client = await hass_ws_client(hass)
supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
await client.send_json_auto_id({"type": "backup/subscribe_events"})
@@ -1975,7 +1933,7 @@ async def test_reader_writer_restore(
) -> None:
"""Test restoring a backup."""
client = await hass_ws_client(hass)
supervisor_client.backups.partial_restore.return_value.job_id = TEST_JOB_ID
supervisor_client.backups.partial_restore.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.list.return_value = [TEST_BACKUP]
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
supervisor_client.jobs.get_job.return_value = get_job_result
@@ -2006,7 +1964,7 @@ async def test_reader_writer_restore(
background=True,
folders=None,
homeassistant=True,
location=None,
location=LOCATION_LOCAL_STORAGE,
password=None,
),
)
@@ -2040,7 +1998,7 @@ async def test_reader_writer_restore_report_progress(
) -> None:
"""Test restoring a backup."""
client = await hass_ws_client(hass)
supervisor_client.backups.partial_restore.return_value.job_id = TEST_JOB_ID
supervisor_client.backups.partial_restore.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.list.return_value = [TEST_BACKUP]
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
@@ -2071,7 +2029,7 @@ async def test_reader_writer_restore_report_progress(
background=True,
folders=None,
homeassistant=True,
location=None,
location=LOCATION_LOCAL_STORAGE,
password=None,
),
)
@@ -2193,7 +2151,7 @@ async def test_reader_writer_restore_error(
background=True,
folders=None,
homeassistant=True,
location=None,
location=LOCATION_LOCAL_STORAGE,
password=None,
),
)
@@ -2221,7 +2179,7 @@ async def test_reader_writer_restore_late_error(
) -> None:
"""Test restoring a backup with error."""
client = await hass_ws_client(hass)
supervisor_client.backups.partial_restore.return_value.job_id = TEST_JOB_ID
supervisor_client.backups.partial_restore.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.list.return_value = [TEST_BACKUP]
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
@@ -2250,7 +2208,7 @@ async def test_reader_writer_restore_late_error(
background=True,
folders=None,
homeassistant=True,
location=None,
location=LOCATION_LOCAL_STORAGE,
password=None,
),
)