commit c2f6255d16
Franck Nijhof 2025-02-12 20:46:47 +01:00, committed by GitHub
30 changed files with 386 additions and 402 deletions

View File

@@ -19,10 +19,20 @@ class ApSystemsEntity(Entity):
         data: ApSystemsData,
     ) -> None:
         """Initialize the APsystems entity."""
+        # Handle device version safely
+        sw_version = None
+        if data.coordinator.device_version:
+            version_parts = data.coordinator.device_version.split(" ")
+            if len(version_parts) > 1:
+                sw_version = version_parts[1]
+            else:
+                sw_version = version_parts[0]
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, data.device_id)},
             manufacturer="APsystems",
             model="EZ1-M",
             serial_number=data.device_id,
-            sw_version=data.coordinator.device_version.split(" ")[1],
+            sw_version=sw_version,
         )
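The guard matters because the old code assumed device_version always contained a space-separated model prefix and crashed with an IndexError otherwise. A minimal sketch of the same defensive parse outside Home Assistant, with hypothetical raw values:

```python
def parse_sw_version(raw: str | None) -> str | None:
    """Return the version component of a 'MODEL VERSION' string, if any."""
    if not raw:
        return None  # device did not report a version at all
    parts = raw.split(" ")
    # Fall back to the whole string when there is no model prefix.
    return parts[1] if len(parts) > 1 else parts[0]

assert parse_sw_version("EZ1 1.6.0") == "1.6.0"  # hypothetical firmware string
assert parse_sw_version("1.6.0") == "1.6.0"
assert parse_sw_version(None) is None
```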

View File

@@ -688,8 +688,8 @@ class BackupManager:
         delete_backup_results = await asyncio.gather(
             *(
-                agent.async_delete_backup(backup_id)
-                for agent in self.backup_agents.values()
+                self.backup_agents[agent_id].async_delete_backup(backup_id)
+                for agent_id in agent_ids
             ),
             return_exceptions=True,
         )
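The rewritten comprehension deletes only from the agents listed in agent_ids instead of every registered agent, and return_exceptions=True keeps one failing agent from aborting the rest. A minimal sketch of the pattern, assuming a hypothetical agents mapping:

```python
import asyncio

async def delete_from_agents(
    agents: dict[str, "BackupAgent"], agent_ids: list[str], backup_id: str
) -> dict[str, Exception]:
    """Delete a backup from selected agents, collecting per-agent failures."""
    results = await asyncio.gather(
        *(agents[agent_id].async_delete_backup(backup_id) for agent_id in agent_ids),
        return_exceptions=True,
    )
    # Map each failing agent to its exception so callers can report per-agent errors.
    return {
        agent_id: result
        for agent_id, result in zip(agent_ids, results)
        if isinstance(result, Exception)
    }
```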

View File

@@ -3,21 +3,16 @@
 from __future__ import annotations

 import asyncio
-import base64
 from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
-import hashlib
 import logging
 import random
 from typing import Any

-from aiohttp import ClientError, ClientTimeout
+from aiohttp import ClientError
 from hass_nabucasa import Cloud, CloudError
-from hass_nabucasa.cloud_api import (
-    async_files_delete_file,
-    async_files_download_details,
-    async_files_list,
-    async_files_upload_details,
-)
+from hass_nabucasa.api import CloudApiNonRetryableError
+from hass_nabucasa.cloud_api import async_files_delete_file, async_files_list
+from hass_nabucasa.files import FilesError, StorageType, calculate_b64md5

 from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
 from homeassistant.core import HomeAssistant, callback
@@ -28,20 +23,11 @@ from .client import CloudClient
 from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT

 _LOGGER = logging.getLogger(__name__)

-_STORAGE_BACKUP = "backup"
 _RETRY_LIMIT = 5
 _RETRY_SECONDS_MIN = 60
 _RETRY_SECONDS_MAX = 600


-async def _b64md5(stream: AsyncIterator[bytes]) -> str:
-    """Calculate the MD5 hash of a file."""
-    file_hash = hashlib.md5()
-    async for chunk in stream:
-        file_hash.update(chunk)
-    return base64.b64encode(file_hash.digest()).decode()


 async def async_get_backup_agents(
     hass: HomeAssistant,
     **kwargs: Any,
@@ -109,63 +95,14 @@ class CloudBackupAgent(BackupAgent):
             raise BackupAgentError("Backup not found")

         try:
-            details = await async_files_download_details(
-                self._cloud,
-                storage_type=_STORAGE_BACKUP,
+            content = await self._cloud.files.download(
+                storage_type=StorageType.BACKUP,
                 filename=self._get_backup_filename(),
             )
-        except (ClientError, CloudError) as err:
-            raise BackupAgentError("Failed to get download details") from err
-
-        try:
-            resp = await self._cloud.websession.get(
-                details["url"],
-                timeout=ClientTimeout(connect=10.0, total=43200.0),  # 43200s == 12h
-            )
-            resp.raise_for_status()
-        except ClientError as err:
-            raise BackupAgentError("Failed to download backup") from err
+        except CloudError as err:
+            raise BackupAgentError(f"Failed to download backup: {err}") from err

-        return ChunkAsyncStreamIterator(resp.content)
-
-    async def _async_do_upload_backup(
-        self,
-        *,
-        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
-        filename: str,
-        base64md5hash: str,
-        metadata: dict[str, Any],
-        size: int,
-    ) -> None:
-        """Upload a backup."""
-        try:
-            details = await async_files_upload_details(
-                self._cloud,
-                storage_type=_STORAGE_BACKUP,
-                filename=filename,
-                metadata=metadata,
-                size=size,
-                base64md5hash=base64md5hash,
-            )
-        except (ClientError, CloudError) as err:
-            raise BackupAgentError("Failed to get upload details") from err
-
-        try:
-            upload_status = await self._cloud.websession.put(
-                details["url"],
-                data=await open_stream(),
-                headers=details["headers"] | {"content-length": str(size)},
-                timeout=ClientTimeout(connect=10.0, total=43200.0),  # 43200s == 12h
-            )
-            _LOGGER.log(
-                logging.DEBUG if upload_status.status < 400 else logging.WARNING,
-                "Backup upload status: %s",
-                upload_status.status,
-            )
-            upload_status.raise_for_status()
-        except (TimeoutError, ClientError) as err:
-            raise BackupAgentError("Failed to upload backup") from err
+        return ChunkAsyncStreamIterator(content)

     async def async_upload_backup(
         self,
@@ -182,15 +119,19 @@ class CloudBackupAgent(BackupAgent):
         if not backup.protected:
             raise BackupAgentError("Cloud backups must be protected")

-        base64md5hash = await _b64md5(await open_stream())
+        size = backup.size
+        try:
+            base64md5hash = await calculate_b64md5(open_stream, size)
+        except FilesError as err:
+            raise BackupAgentError(err) from err
         filename = self._get_backup_filename()
         metadata = backup.as_dict()
-        size = backup.size

         tries = 1
         while tries <= _RETRY_LIMIT:
             try:
-                await self._async_do_upload_backup(
+                await self._cloud.files.upload(
+                    storage_type=StorageType.BACKUP,
                     open_stream=open_stream,
                     filename=filename,
                     base64md5hash=base64md5hash,
@@ -198,9 +139,19 @@ class CloudBackupAgent(BackupAgent):
                     size=size,
                 )
                 break
-            except BackupAgentError as err:
+            except CloudApiNonRetryableError as err:
+                if err.code == "NC-SH-FH-03":
+                    raise BackupAgentError(
+                        translation_domain=DOMAIN,
+                        translation_key="backup_size_too_large",
+                        translation_placeholders={
+                            "size": str(round(size / (1024**3), 2))
+                        },
+                    ) from err
+                raise BackupAgentError(f"Failed to upload backup {err}") from err
+            except CloudError as err:
                 if tries == _RETRY_LIMIT:
-                    raise
+                    raise BackupAgentError(f"Failed to upload backup {err}") from err
                 tries += 1
                 retry_timer = random.randint(_RETRY_SECONDS_MIN, _RETRY_SECONDS_MAX)
                 _LOGGER.info(
@@ -227,7 +178,7 @@ class CloudBackupAgent(BackupAgent):
         try:
             await async_files_delete_file(
                 self._cloud,
-                storage_type=_STORAGE_BACKUP,
+                storage_type=StorageType.BACKUP,
                 filename=self._get_backup_filename(),
             )
         except (ClientError, CloudError) as err:
@@ -236,7 +187,9 @@ class CloudBackupAgent(BackupAgent):
     async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
         """List backups."""
         try:
-            backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP)
+            backups = await async_files_list(
+                self._cloud, storage_type=StorageType.BACKUP
+            )
             _LOGGER.debug("Cloud backups: %s", backups)
         except (ClientError, CloudError) as err:
             raise BackupAgentError("Failed to list backups") from err
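The upload loop now separates two failure classes: CloudApiNonRetryableError aborts immediately (with the "NC-SH-FH-03" code mapped to a translated size-limit message), while transient CloudError is retried with a randomized delay. A minimal sketch of that retry shape, with hypothetical exception and function names standing in for the hass_nabucasa types:

```python
import asyncio
import random

_RETRY_LIMIT = 5
_RETRY_SECONDS_MIN = 60
_RETRY_SECONDS_MAX = 600

class NonRetryableError(Exception):
    """Stands in for CloudApiNonRetryableError: retrying cannot succeed."""

async def upload_with_retry(do_upload) -> None:
    """Retry transient failures; surface permanent ones immediately."""
    tries = 1
    while tries <= _RETRY_LIMIT:
        try:
            await do_upload()
            return
        except NonRetryableError:
            raise  # e.g. the backup exceeds the cloud size limit
        except Exception as err:
            if tries == _RETRY_LIMIT:
                raise RuntimeError("Failed to upload backup") from err
            tries += 1
            # A randomized delay spreads retries out instead of hammering the API.
            await asyncio.sleep(random.randint(_RETRY_SECONDS_MIN, _RETRY_SECONDS_MAX))
```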

View File

@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["hass_nabucasa"],
-  "requirements": ["hass-nabucasa==0.88.1"],
+  "requirements": ["hass-nabucasa==0.90.0"],
   "single_config_entry": true
 }

View File

@@ -17,6 +17,11 @@
       "subscription_expiration": "Subscription expiration"
     }
   },
+  "exceptions": {
+    "backup_size_too_large": {
+      "message": "The backup size of {size}GB is too large to be uploaded to Home Assistant Cloud."
+    }
+  },
   "issues": {
     "deprecated_gender": {
       "title": "The {deprecated_option} text-to-speech option is deprecated",

View File

@@ -23,7 +23,7 @@ from homeassistant.components.climate import (
 from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
+from homeassistant.helpers.issue_registry import IssueSeverity, create_issue

 from . import EconetConfigEntry
 from .const import DOMAIN
@@ -35,8 +35,13 @@ ECONET_STATE_TO_HA = {
     ThermostatOperationMode.OFF: HVACMode.OFF,
     ThermostatOperationMode.AUTO: HVACMode.HEAT_COOL,
     ThermostatOperationMode.FAN_ONLY: HVACMode.FAN_ONLY,
+    ThermostatOperationMode.EMERGENCY_HEAT: HVACMode.HEAT,
+}
+HA_STATE_TO_ECONET = {
+    value: key
+    for key, value in ECONET_STATE_TO_HA.items()
+    if key != ThermostatOperationMode.EMERGENCY_HEAT
 }
-HA_STATE_TO_ECONET = {value: key for key, value in ECONET_STATE_TO_HA.items()}

 ECONET_FAN_STATE_TO_HA = {
     ThermostatFanMode.AUTO: FAN_AUTO,
@@ -209,7 +214,7 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):
     def turn_aux_heat_on(self) -> None:
         """Turn auxiliary heater on."""
-        async_create_issue(
+        create_issue(
             self.hass,
             DOMAIN,
             "migrate_aux_heat",
@@ -223,7 +228,7 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):
     def turn_aux_heat_off(self) -> None:
         """Turn auxiliary heater off."""
-        async_create_issue(
+        create_issue(
             self.hass,
             DOMAIN,
             "migrate_aux_heat",

View File

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ecovacs",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.10", "deebot-client==12.0.0"]
+  "requirements": ["py-sucks==0.9.10", "deebot-client==12.1.0"]
 }

View File

@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/enphase_envoy",
   "iot_class": "local_polling",
   "loggers": ["pyenphase"],
-  "requirements": ["pyenphase==1.23.1"],
+  "requirements": ["pyenphase==1.25.1"],
   "zeroconf": [
     {
       "type": "_enphase-envoy._tcp.local."

View File

@@ -8,7 +8,7 @@ from collections.abc import Awaitable, Callable
 from http import HTTPStatus
 import logging

-from aiohttp import web
+from aiohttp import ClientError, ClientResponseError, web
 from google_nest_sdm.camera_traits import CameraClipPreviewTrait
 from google_nest_sdm.device import Device
 from google_nest_sdm.event import EventMessage
@@ -201,11 +201,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: NestConfigEntry) -> bool
     auth = await api.new_auth(hass, entry)
     try:
         await auth.async_get_access_token()
-    except AuthException as err:
-        raise ConfigEntryAuthFailed(f"Authentication error: {err!s}") from err
-    except ConfigurationException as err:
-        _LOGGER.error("Configuration error: %s", err)
-        return False
+    except ClientResponseError as err:
+        if 400 <= err.status < 500:
+            raise ConfigEntryAuthFailed from err
+        raise ConfigEntryNotReady from err
+    except ClientError as err:
+        raise ConfigEntryNotReady from err

     subscriber = await api.new_subscriber(hass, entry, auth)
     if not subscriber:
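The new handler sorts token failures into Home Assistant's two standard setup outcomes: a 4xx response means the stored credentials were rejected (ConfigEntryAuthFailed starts a re-auth flow), while 5xx responses and transport errors raise ConfigEntryNotReady so setup is retried later. A minimal sketch of that classification rule, with hypothetical exception types:

```python
class AuthFailed(Exception):
    """Stands in for ConfigEntryAuthFailed (triggers re-authentication)."""

class NotReady(Exception):
    """Stands in for ConfigEntryNotReady (setup retried with backoff)."""

def classify(status: int | None) -> type[Exception]:
    """Map an HTTP status (None for transport errors) to a setup outcome."""
    if status is not None and 400 <= status < 500:
        return AuthFailed  # our credentials are bad; retrying won't help
    return NotReady  # server-side or network trouble; retry later

assert classify(401) is AuthFailed
assert classify(503) is NotReady
assert classify(None) is NotReady
```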

View File

@@ -50,13 +50,14 @@ class AsyncConfigEntryAuth(AbstractAuth):
         return cast(str, self._oauth_session.token["access_token"])

     async def async_get_creds(self) -> Credentials:
-        """Return an OAuth credential for Pub/Sub Subscriber."""
-        # We don't have a way for Home Assistant to refresh creds on behalf
-        # of the google pub/sub subscriber. Instead, build a full
-        # Credentials object with enough information for the subscriber to
-        # handle this on its own. We purposely don't refresh the token here
-        # even when it is expired to fully hand off this responsibility and
-        # know it is working at startup (then if not, fail loudly).
+        """Return an OAuth credential for Pub/Sub Subscriber.
+
+        The subscriber will call this when connecting to the stream to refresh
+        the token. We construct a credentials object using the underlying
+        OAuth2Session since the subscriber may expect the expiry fields to
+        be present.
+        """
+        await self.async_get_access_token()
         token = self._oauth_session.token
         creds = Credentials(  # type: ignore[no-untyped-call]
             token=token["access_token"],

View File

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/sentry",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["sentry-sdk==1.40.3"]
+  "requirements": ["sentry-sdk==1.45.1"]
 }

View File

@@ -9,6 +9,7 @@ import logging
 from kasa import AuthenticationError, Credentials, Device, KasaException
 from kasa.iot import IotStrip

+from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed
@@ -46,11 +47,9 @@ class TPLinkDataUpdateCoordinator(DataUpdateCoordinator[None]):
         device: Device,
         update_interval: timedelta,
         config_entry: TPLinkConfigEntry,
-        parent_coordinator: TPLinkDataUpdateCoordinator | None = None,
     ) -> None:
         """Initialize DataUpdateCoordinator to gather data for specific SmartPlug."""
         self.device = device
-        self.parent_coordinator = parent_coordinator

         # The iot HS300 allows a limited number of concurrent requests and
         # fetching the emeter information requires separate ones, so child
@@ -97,12 +96,6 @@ class TPLinkDataUpdateCoordinator(DataUpdateCoordinator[None]):
             ) from ex

         await self._process_child_devices()
-        if not self._update_children:
-            # If the children are not being updated, it means this is an
-            # IotStrip, and we need to tell the children to write state
-            # since the power state is provided by the parent.
-            for child_coordinator in self._child_coordinators.values():
-                child_coordinator.async_set_updated_data(None)

     async def _process_child_devices(self) -> None:
         """Process child devices and remove stale devices."""
@@ -131,20 +124,19 @@ class TPLinkDataUpdateCoordinator(DataUpdateCoordinator[None]):
     def get_child_coordinator(
         self,
         child: Device,
+        platform_domain: str,
     ) -> TPLinkDataUpdateCoordinator:
         """Get separate child coordinator for a device or self if not needed."""
         # The iot HS300 allows a limited number of concurrent requests and fetching the
         # emeter information requires separate ones so create child coordinators here.
-        if isinstance(self.device, IotStrip):
+        # This does not happen for switches as the state is available on the
+        # parent device info.
+        if isinstance(self.device, IotStrip) and platform_domain != SWITCH_DOMAIN:
             if not (child_coordinator := self._child_coordinators.get(child.device_id)):
                 # The child coordinators only update energy data so we can
                 # set a longer update interval to avoid flooding the device
                 child_coordinator = TPLinkDataUpdateCoordinator(
-                    self.hass,
-                    child,
-                    timedelta(seconds=60),
-                    self.config_entry,
-                    parent_coordinator=self,
+                    self.hass, child, timedelta(seconds=60), self.config_entry
                 )
                 self._child_coordinators[child.device_id] = child_coordinator
             return child_coordinator
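The new platform_domain parameter lets the coordinator skip creating a child coordinator for switch entities, whose on/off state already arrives with the parent HS300 update; only platforms that need the per-outlet emeter data get their own, slower polling loop. A stripped-down sketch of the lazy per-child cache, with hypothetical names and the IotStrip check omitted:

```python
from datetime import timedelta

class Coordinator:
    """Hypothetical stand-in for TPLinkDataUpdateCoordinator."""

    def __init__(self, interval: timedelta) -> None:
        self.interval = interval
        self._children: dict[str, "Coordinator"] = {}

    def get_child(self, device_id: str, platform_domain: str) -> "Coordinator":
        # Switches read their state from the parent update; no extra polling.
        if platform_domain == "switch":
            return self
        if device_id not in self._children:
            # Child coordinators only fetch energy data, so poll less often.
            self._children[device_id] = Coordinator(timedelta(seconds=60))
        return self._children[device_id]
```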

View File

@@ -151,13 +151,7 @@ def async_refresh_after[_T: CoordinatedTPLinkEntity, **_P](
                     "exc": str(ex),
                 },
             ) from ex
-        coordinator = self.coordinator
-        if coordinator.parent_coordinator:
-            # If there is a parent coordinator we need to refresh
-            # the parent as its what provides the power state data
-            # for the child entities.
-            coordinator = coordinator.parent_coordinator
-        await coordinator.async_request_refresh()
+        await self.coordinator.async_request_refresh()

     return _async_wrap
@@ -514,7 +508,9 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC):
         )
         for child in children:
-            child_coordinator = coordinator.get_child_coordinator(child)
+            child_coordinator = coordinator.get_child_coordinator(
+                child, platform_domain
+            )

             child_entities = cls._entities_for_device(
                 hass,
@@ -657,7 +653,9 @@ class CoordinatedTPLinkModuleEntity(CoordinatedTPLinkEntity, ABC):
             device.host,
         )
         for child in children:
-            child_coordinator = coordinator.get_child_coordinator(child)
+            child_coordinator = coordinator.get_child_coordinator(
+                child, platform_domain
+            )

             child_entities: list[_E] = cls._entities_for_device(
                 hass,

View File

@@ -8,5 +8,5 @@
   "iot_class": "local_push",
   "loggers": ["zeroconf"],
   "quality_scale": "internal",
-  "requirements": ["zeroconf==0.143.0"]
+  "requirements": ["zeroconf==0.144.1"]
 }

View File

@@ -25,7 +25,7 @@ if TYPE_CHECKING:
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2025
 MINOR_VERSION: Final = 2
-PATCH_VERSION: Final = "2"
+PATCH_VERSION: Final = "3"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 0)

View File

@@ -28,13 +28,13 @@ cached-ipaddress==0.8.0
 certifi>=2021.5.30
 ciso8601==2.3.2
 cronsim==2.6
-cryptography==44.0.0
+cryptography==44.0.1
 dbus-fast==2.33.0
 fnv-hash-fast==1.2.2
 go2rtc-client==0.1.2
 ha-ffmpeg==3.2.2
 habluetooth==3.21.1
-hass-nabucasa==0.88.1
+hass-nabucasa==0.90.0
 hassil==2.2.3
 home-assistant-bluetooth==1.13.0
 home-assistant-frontend==20250210.0
@@ -73,7 +73,7 @@ voluptuous-serialize==2.6.0
 voluptuous==0.15.2
 webrtc-models==0.3.0
 yarl==1.18.3
-zeroconf==0.143.0
+zeroconf==0.144.1

 # Constrain pycryptodome to avoid vulnerability
 # see https://github.com/home-assistant/core/pull/16238

View File

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "homeassistant"
-version = "2025.2.2"
+version = "2025.2.3"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
 readme = "README.rst"
@@ -46,7 +46,7 @@ dependencies = [
     "fnv-hash-fast==1.2.2",
     # hass-nabucasa is imported by helpers which don't depend on the cloud
     # integration
-    "hass-nabucasa==0.88.1",
+    "hass-nabucasa==0.90.0",
     # When bumping httpx, please check the version pins of
     # httpcore, anyio, and h11 in gen_requirements_all
     "httpx==0.28.1",
@@ -56,7 +56,7 @@ dependencies = [
     "lru-dict==1.3.0",
     "PyJWT==2.10.1",
     # PyJWT has loose dependency. We want the latest one.
-    "cryptography==44.0.0",
+    "cryptography==44.0.1",
     "Pillow==11.1.0",
     "propcache==0.2.1",
     "pyOpenSSL==24.3.0",
@@ -82,7 +82,7 @@ dependencies = [
     "voluptuous-openapi==0.0.6",
     "yarl==1.18.3",
     "webrtc-models==0.3.0",
-    "zeroconf==0.143.0"
+    "zeroconf==0.144.1"
 ]

 [project.urls]
[project.urls] [project.urls]

requirements.txt (generated)
View File

@@ -21,14 +21,14 @@ certifi>=2021.5.30
 ciso8601==2.3.2
 cronsim==2.6
 fnv-hash-fast==1.2.2
-hass-nabucasa==0.88.1
+hass-nabucasa==0.90.0
 httpx==0.28.1
 home-assistant-bluetooth==1.13.0
 ifaddr==0.2.0
 Jinja2==3.1.5
 lru-dict==1.3.0
 PyJWT==2.10.1
-cryptography==44.0.0
+cryptography==44.0.1
 Pillow==11.1.0
 propcache==0.2.1
 pyOpenSSL==24.3.0
@@ -51,4 +51,4 @@ voluptuous-serialize==2.6.0
 voluptuous-openapi==0.0.6
 yarl==1.18.3
 webrtc-models==0.3.0
-zeroconf==0.143.0
+zeroconf==0.144.1

requirements_all.txt (generated)
View File

@@ -747,7 +747,7 @@ debugpy==1.8.11
 # decora==0.6

 # homeassistant.components.ecovacs
-deebot-client==12.0.0
+deebot-client==12.1.0

 # homeassistant.components.ihc
 # homeassistant.components.namecheapdns
@@ -1103,7 +1103,7 @@ habiticalib==0.3.7
 habluetooth==3.21.1

 # homeassistant.components.cloud
-hass-nabucasa==0.88.1
+hass-nabucasa==0.90.0

 # homeassistant.components.splunk
 hass-splunk==0.1.1
@@ -1930,7 +1930,7 @@ pyeiscp==0.0.7
 pyemoncms==0.1.1

 # homeassistant.components.enphase_envoy
-pyenphase==1.23.1
+pyenphase==1.25.1

 # homeassistant.components.envisalink
 pyenvisalink==4.7
@@ -2694,7 +2694,7 @@ sensorpush-ble==1.7.1
 sensoterra==2.0.1

 # homeassistant.components.sentry
-sentry-sdk==1.40.3
+sentry-sdk==1.45.1

 # homeassistant.components.sfr_box
 sfrbox-api==0.0.11
@@ -3125,7 +3125,7 @@ zamg==0.3.6
 zengge==0.2

 # homeassistant.components.zeroconf
-zeroconf==0.143.0
+zeroconf==0.144.1

 # homeassistant.components.zeversolar
 zeversolar==0.3.2

View File

@@ -637,7 +637,7 @@ dbus-fast==2.33.0
 debugpy==1.8.11

 # homeassistant.components.ecovacs
-deebot-client==12.0.0
+deebot-client==12.1.0

 # homeassistant.components.ihc
 # homeassistant.components.namecheapdns
@@ -944,7 +944,7 @@ habiticalib==0.3.7
 habluetooth==3.21.1

 # homeassistant.components.cloud
-hass-nabucasa==0.88.1
+hass-nabucasa==0.90.0

 # homeassistant.components.conversation
 hassil==2.2.3
@@ -1574,7 +1574,7 @@ pyeiscp==0.0.7
 pyemoncms==0.1.1

 # homeassistant.components.enphase_envoy
-pyenphase==1.23.1
+pyenphase==1.25.1

 # homeassistant.components.everlights
 pyeverlights==0.1.0
@@ -2173,7 +2173,7 @@ sensorpush-ble==1.7.1
 sensoterra==2.0.1

 # homeassistant.components.sentry
-sentry-sdk==1.40.3
+sentry-sdk==1.45.1

 # homeassistant.components.sfr_box
 sfrbox-api==0.0.11
@@ -2514,7 +2514,7 @@ yt-dlp[default]==2025.01.26
 zamg==0.3.6

 # homeassistant.components.zeroconf
-zeroconf==0.143.0
+zeroconf==0.144.1

 # homeassistant.components.zeversolar
 zeversolar==0.3.2

View File

@@ -6,7 +6,6 @@ from unittest.mock import ANY, AsyncMock, MagicMock, Mock, call, patch

 from freezegun.api import FrozenDateTimeFactory
 import pytest
-from pytest_unordered import unordered
 from syrupy import SnapshotAssertion

 from homeassistant.components.backup import (
@@ -100,15 +99,6 @@ def mock_delay_save() -> Generator[None]:
         yield


-@pytest.fixture(name="delete_backup")
-def mock_delete_backup() -> Generator[AsyncMock]:
-    """Mock manager delete backup."""
-    with patch(
-        "homeassistant.components.backup.BackupManager.async_delete_backup"
-    ) as mock_delete_backup:
-        yield mock_delete_backup
-
-
 @pytest.fixture(name="get_backups")
 def mock_get_backups() -> Generator[AsyncMock]:
     """Mock manager get backups."""
@@ -911,7 +901,7 @@ async def test_agents_info(
     assert await client.receive_json() == snapshot


-@pytest.mark.usefixtures("create_backup", "delete_backup", "get_backups")
+@pytest.mark.usefixtures("get_backups")
 @pytest.mark.parametrize(
     "storage_data",
     [
@@ -1161,7 +1151,7 @@ async def test_config_info(
     assert await client.receive_json() == snapshot


-@pytest.mark.usefixtures("create_backup", "delete_backup", "get_backups")
+@pytest.mark.usefixtures("get_backups")
 @pytest.mark.parametrize(
     "commands",
     [
@@ -1326,7 +1316,7 @@ async def test_config_update(
     assert hass_storage[DOMAIN] == snapshot


-@pytest.mark.usefixtures("create_backup", "delete_backup", "get_backups")
+@pytest.mark.usefixtures("get_backups")
 @pytest.mark.parametrize(
     "command",
     [
@@ -1783,14 +1773,13 @@ async def test_config_schedule_logic(
         "command",
         "backups",
         "get_backups_agent_errors",
-        "delete_backup_agent_errors",
+        "agent_delete_backup_side_effects",
         "last_backup_time",
         "next_time",
         "backup_time",
         "backup_calls",
         "get_backups_calls",
         "delete_calls",
-        "delete_args_list",
     ),
     [
         (
@@ -1833,8 +1822,7 @@ async def test_config_schedule_logic(
             "2024-11-12T04:45:00+01:00",
             1,
             1,  # we get backups even if backup retention copies is None
-            0,
-            [],
+            {},
         ),
         (
             {
@@ -1876,8 +1864,7 @@ async def test_config_schedule_logic(
             "2024-11-12T04:45:00+01:00",
             1,
             1,
-            0,
-            [],
+            {},
         ),
         (
             {
@@ -1907,8 +1894,7 @@ async def test_config_schedule_logic(
             "2024-11-12T04:45:00+01:00",
             1,
             1,
-            0,
-            [],
+            {},
         ),
         (
             {
@@ -1971,13 +1957,10 @@ async def test_config_schedule_logic(
             "2024-11-12T04:45:00+01:00",
             1,
             1,
-            1,
-            [
-                call(
-                    "backup-1",
-                    agent_ids=unordered(["test.test-agent", "test.test-agent2"]),
-                )
-            ],
+            {
+                "test.test-agent": [call("backup-1")],
+                "test.test-agent2": [call("backup-1")],
+            },
         ),
         (
             {
@@ -2039,13 +2022,10 @@ async def test_config_schedule_logic(
             "2024-11-12T04:45:00+01:00",
             1,
             1,
-            1,
-            [
-                call(
-                    "backup-1",
-                    agent_ids=unordered(["test.test-agent", "test.test-agent2"]),
-                )
-            ],
+            {
+                "test.test-agent": [call("backup-1")],
+                "test.test-agent2": [call("backup-1")],
+            },
         ),
         (
             {
@@ -2093,11 +2073,7 @@ async def test_config_schedule_logic(
             "2024-11-12T04:45:00+01:00",
             1,
             1,
-            2,
-            [
-                call("backup-1", agent_ids=["test.test-agent"]),
-                call("backup-2", agent_ids=["test.test-agent"]),
-            ],
+            {"test.test-agent": [call("backup-1"), call("backup-2")]},
         ),
         (
             {
@@ -2132,15 +2108,14 @@ async def test_config_schedule_logic(
                     spec=ManagerBackup,
                 ),
             },
-            {"test-agent": BackupAgentError("Boom!")},
+            {"test.test-agent": BackupAgentError("Boom!")},
             {},
             "2024-11-11T04:45:00+01:00",
             "2024-11-12T04:45:00+01:00",
             "2024-11-12T04:45:00+01:00",
             1,
             1,
-            1,
-            [call("backup-1", agent_ids=["test.test-agent"])],
+            {"test.test-agent": [call("backup-1")]},
         ),
         (
             {
@@ -2176,14 +2151,13 @@ async def test_config_schedule_logic(
                 ),
             },
             {},
-            {"test-agent": BackupAgentError("Boom!")},
+            {"test.test-agent": BackupAgentError("Boom!")},
             "2024-11-11T04:45:00+01:00",
             "2024-11-12T04:45:00+01:00",
             "2024-11-12T04:45:00+01:00",
             1,
             1,
-            1,
-            [call("backup-1", agent_ids=["test.test-agent"])],
+            {"test.test-agent": [call("backup-1")]},
         ),
         (
             {
@@ -2246,21 +2220,18 @@ async def test_config_schedule_logic(
             "2024-11-12T04:45:00+01:00",
             1,
             1,
-            3,
-            [
-                call(
-                    "backup-1",
-                    agent_ids=unordered(["test.test-agent", "test.test-agent2"]),
-                ),
-                call(
-                    "backup-2",
-                    agent_ids=unordered(["test.test-agent", "test.test-agent2"]),
-                ),
-                call(
-                    "backup-3",
-                    agent_ids=unordered(["test.test-agent", "test.test-agent2"]),
-                ),
-            ],
+            {
+                "test.test-agent": [
+                    call("backup-1"),
+                    call("backup-2"),
+                    call("backup-3"),
+                ],
+                "test.test-agent2": [
+                    call("backup-1"),
+                    call("backup-2"),
+                    call("backup-3"),
+                ],
+            },
         ),
         (
             {
@@ -2322,18 +2293,14 @@ async def test_config_schedule_logic(
             "2024-11-12T04:45:00+01:00",
             1,
             1,
-            3,
-            [
-                call(
-                    "backup-1",
-                    agent_ids=unordered(["test.test-agent", "test.test-agent2"]),
-                ),
-                call(
-                    "backup-2",
-                    agent_ids=unordered(["test.test-agent", "test.test-agent2"]),
-                ),
-                call("backup-3", agent_ids=["test.test-agent"]),
-            ],
+            {
+                "test.test-agent": [
+                    call("backup-1"),
+                    call("backup-2"),
+                    call("backup-3"),
+                ],
+                "test.test-agent2": [call("backup-1"), call("backup-2")],
+            },
         ),
         (
             {
@@ -2363,8 +2330,7 @@ async def test_config_schedule_logic(
             "2024-11-12T04:45:00+01:00",
             1,
             1,
-            0,
-            [],
+            {},
         ),
     ],
 )
@@ -2375,19 +2341,17 @@ async def test_config_retention_copies_logic(
     freezer: FrozenDateTimeFactory,
     hass_storage: dict[str, Any],
     create_backup: AsyncMock,
-    delete_backup: AsyncMock,
     get_backups: AsyncMock,
     command: dict[str, Any],
     backups: dict[str, Any],
     get_backups_agent_errors: dict[str, Exception],
-    delete_backup_agent_errors: dict[str, Exception],
+    agent_delete_backup_side_effects: dict[str, Exception],
     last_backup_time: str,
     next_time: str,
     backup_time: str,
     backup_calls: int,
     get_backups_calls: int,
-    delete_calls: int,
-    delete_args_list: Any,
+    delete_calls: dict[str, Any],
 ) -> None:
     """Test config backup retention copies logic."""
     created_backup: MagicMock = create_backup.return_value[1].result().backup
@@ -2425,13 +2389,18 @@ async def test_config_retention_copies_logic(
         "minor_version": store.STORAGE_VERSION_MINOR,
     }
     get_backups.return_value = (backups, get_backups_agent_errors)
-    delete_backup.return_value = delete_backup_agent_errors
     await hass.config.async_set_time_zone("Europe/Amsterdam")
     freezer.move_to("2024-11-11 12:00:00+01:00")

-    await setup_backup_integration(hass, remote_agents=["test-agent"])
+    await setup_backup_integration(hass, remote_agents=["test-agent", "test-agent2"])
     await hass.async_block_till_done()

+    manager = hass.data[DATA_MANAGER]
+    for agent_id, agent in manager.backup_agents.items():
+        agent.async_delete_backup = AsyncMock(
+            side_effect=agent_delete_backup_side_effects.get(agent_id), autospec=True
+        )
+
     await client.send_json_auto_id(command)
     result = await client.receive_json()
@@ -2442,8 +2411,10 @@ async def test_config_retention_copies_logic(
     await hass.async_block_till_done()
     assert create_backup.call_count == backup_calls
     assert get_backups.call_count == get_backups_calls
-    assert delete_backup.call_count == delete_calls
-    assert delete_backup.call_args_list == delete_args_list
+    for agent_id, agent in manager.backup_agents.items():
+        agent_delete_calls = delete_calls.get(agent_id, [])
+        assert agent.async_delete_backup.call_count == len(agent_delete_calls)
+        assert agent.async_delete_backup.call_args_list == agent_delete_calls
     async_fire_time_changed(hass, fire_all=True)  # flush out storage save
     await hass.async_block_till_done()
     assert (
@@ -2474,11 +2445,9 @@ async def test_config_retention_copies_logic(
         "config_command",
         "backups",
         "get_backups_agent_errors",
-        "delete_backup_agent_errors",
         "backup_calls",
         "get_backups_calls",
         "delete_calls",
-        "delete_args_list",
     ),
     [
         (
@@ -2515,11 +2484,9 @@ async def test_config_retention_copies_logic(
                 ),
             },
             {},
-            {},
             1,
             1,  # we get backups even if backup retention copies is None
-            0,
-            [],
+            {},
         ),
         (
             {
@@ -2555,11 +2522,9 @@ async def test_config_retention_copies_logic(
                 ),
             },
             {},
+            1,
+            1,
             {},
-            1,
-            1,
-            0,
-            [],
         ),
         (
             {
@@ -2601,11 +2566,9 @@ async def test_config_retention_copies_logic(
                 ),
             },
             {},
-            {},
             1,
             1,
-            1,
-            [call("backup-1", agent_ids=["test.test-agent"])],
+            {"test.test-agent": [call("backup-1")]},
         ),
         (
             {
@@ -2647,14 +2610,9 @@ async def test_config_retention_copies_logic(
                 ),
             },
             {},
-            {},
             1,
             1,
-            2,
-            [
-                call("backup-1", agent_ids=["test.test-agent"]),
-                call("backup-2", agent_ids=["test.test-agent"]),
-            ],
+            {"test.test-agent": [call("backup-1"), call("backup-2")]},
         ),
     ],
 )
@@ -2664,18 +2622,15 @@ async def test_config_retention_copies_logic_manual_backup(
     freezer: FrozenDateTimeFactory,
     hass_storage: dict[str, Any],
     create_backup: AsyncMock,
-    delete_backup: AsyncMock,
     get_backups: AsyncMock,
     config_command: dict[str, Any],
     backup_command: dict[str, Any],
     backups: dict[str, Any],
     get_backups_agent_errors: dict[str, Exception],
-    delete_backup_agent_errors: dict[str, Exception],
     backup_time: str,
     backup_calls: int,
     get_backups_calls: int,
-    delete_calls: int,
-    delete_args_list: Any,
+    delete_calls: dict[str, Any],
 ) -> None:
     """Test config backup retention copies logic for manual backup."""
     created_backup: MagicMock = create_backup.return_value[1].result().backup
@@ -2713,13 +2668,16 @@ async def test_config_retention_copies_logic_manual_backup(
         "minor_version": store.STORAGE_VERSION_MINOR,
     }
     get_backups.return_value = (backups, get_backups_agent_errors)
-    delete_backup.return_value = delete_backup_agent_errors
     await hass.config.async_set_time_zone("Europe/Amsterdam")
     freezer.move_to("2024-11-11 12:00:00+01:00")

     await setup_backup_integration(hass, remote_agents=["test-agent"])
     await hass.async_block_till_done()

+    manager = hass.data[DATA_MANAGER]
+    for agent in manager.backup_agents.values():
+        agent.async_delete_backup = AsyncMock(autospec=True)
+
     await client.send_json_auto_id(config_command)
     result = await client.receive_json()
     assert result["success"]
@@ -2734,8 +2692,10 @@ async def test_config_retention_copies_logic_manual_backup(

     assert create_backup.call_count == backup_calls
     assert get_backups.call_count == get_backups_calls
-    assert delete_backup.call_count == delete_calls
-    assert delete_backup.call_args_list == delete_args_list
+    for agent_id, agent in manager.backup_agents.items():
+        agent_delete_calls = delete_calls.get(agent_id, [])
+        assert agent.async_delete_backup.call_count == len(agent_delete_calls)
+        assert agent.async_delete_backup.call_args_list == agent_delete_calls
     async_fire_time_changed(hass, fire_all=True)  # flush out storage save
     await hass.async_block_till_done()
     assert (
@@ -2754,13 +2714,12 @@ async def test_config_retention_copies_logic_manual_backup(
         "commands",
         "backups",
         "get_backups_agent_errors",
-        "delete_backup_agent_errors",
+        "agent_delete_backup_side_effects",
         "last_backup_time",
         "start_time",
         "next_time",
         "get_backups_calls",
         "delete_calls",
-        "delete_args_list",
     ),
     [
         # No config update - cleanup backups older than 2 days
         (
@@ -2793,8 +2752,7 @@ async def test_config_retention_days_logic(
             "2024-11-11T12:00:00+01:00",
             "2024-11-12T12:00:00+01:00",
             1,
-            1,
-            [call("backup-1", agent_ids=["test.test-agent"])],
+            {"test.test-agent": [call("backup-1")]},
         ),
         # No config update - No cleanup
         (
@@ -2826,8 +2784,7 @@ async def test_config_retention_days_logic(
             "2024-11-11T12:00:00+01:00",
             "2024-11-12T12:00:00+01:00",
             0,
-            0,
-            [],
+            {},
         ),
         # Unchanged config
         (
@@ -2866,8 +2823,7 @@ async def test_config_retention_days_logic(
             "2024-11-11T12:00:00+01:00",
             "2024-11-12T12:00:00+01:00",
             1,
-            1,
-            [call("backup-1", agent_ids=["test.test-agent"])],
+            {"test.test-agent": [call("backup-1")]},
         ),
         (
             None,
@@ -2905,8 +2861,7 @@ async def test_config_retention_days_logic(
             "2024-11-11T12:00:00+01:00",
             "2024-11-12T12:00:00+01:00",
             1,
-            1,
-            [call("backup-1", agent_ids=["test.test-agent"])],
+            {"test.test-agent": [call("backup-1")]},
         ),
         (
             None,
@@ -2944,8 +2899,7 @@ async def test_config_retention_days_logic(
             "2024-11-11T12:00:00+01:00",
             "2024-11-12T12:00:00+01:00",
             1,
-            0,
-            [],
+            {},
         ),
         (
             None,
@@ -2989,11 +2943,7 @@ async def test_config_retention_days_logic(
             "2024-11-11T12:00:00+01:00",
             "2024-11-12T12:00:00+01:00",
             1,
-            2,
-            [
-                call("backup-1", agent_ids=["test.test-agent"]),
-                call("backup-2", agent_ids=["test.test-agent"]),
-            ],
+            {"test.test-agent": [call("backup-1"), call("backup-2")]},
         ),
         (
             None,
@@ -3031,8 +2981,7 @@ async def test_config_retention_days_logic(
             "2024-11-11T12:00:00+01:00",
             "2024-11-12T12:00:00+01:00",
             1,
-            1,
-            [call("backup-1", agent_ids=["test.test-agent"])],
+            {"test.test-agent": [call("backup-1")]},
         ),
         (
             None,
@@ -3070,8 +3019,7 @@ async def test_config_retention_days_logic(
             "2024-11-11T12:00:00+01:00",
             "2024-11-12T12:00:00+01:00",
             1,
-            1,
-            [call("backup-1", agent_ids=["test.test-agent"])],
+            {"test.test-agent": [call("backup-1")]},
        ),
         (
             None,
@@ -3115,11 +3063,7 @@ async def test_config_retention_days_logic(
             "2024-11-11T12:00:00+01:00",
             "2024-11-12T12:00:00+01:00",
             1,
-            2,
-            [
-                call("backup-1", agent_ids=["test.test-agent"]),
-                call("backup-2", agent_ids=["test.test-agent"]),
-            ],
+            {"test.test-agent": [call("backup-1"), call("backup-2")]},
         ),
     ],
 )
@@ -3128,19 +3072,17 @@ async def test_config_retention_days_logic(
     hass_ws_client: WebSocketGenerator,
     freezer: FrozenDateTimeFactory,
     hass_storage: dict[str, Any],
-    delete_backup: AsyncMock,
     get_backups: AsyncMock,
     stored_retained_days: int | None,
     commands: list[dict[str, Any]],
     backups: dict[str, Any],
     get_backups_agent_errors: dict[str, Exception],
-    delete_backup_agent_errors: dict[str, Exception],
+    agent_delete_backup_side_effects: dict[str, Exception],
     last_backup_time: str,
     start_time: str,
     next_time: str,
     get_backups_calls: int,
-    delete_calls: int,
-    delete_args_list: list[Any],
+    delete_calls: dict[str, Any],
 ) -> None:
     """Test config backup retention logic."""
     client = await hass_ws_client(hass)
@@ -3175,13 +3117,18 @@ async def test_config_retention_days_logic(
         "minor_version": store.STORAGE_VERSION_MINOR,
     }
     get_backups.return_value = (backups, get_backups_agent_errors)
-    delete_backup.return_value = delete_backup_agent_errors
     await hass.config.async_set_time_zone("Europe/Amsterdam")
     freezer.move_to(start_time)

-    await setup_backup_integration(hass)
+    await setup_backup_integration(hass, remote_agents=["test-agent"])
     await hass.async_block_till_done()

+    manager = hass.data[DATA_MANAGER]
+    for agent_id, agent in manager.backup_agents.items():
+        agent.async_delete_backup = AsyncMock(
+            side_effect=agent_delete_backup_side_effects.get(agent_id), autospec=True
+        )
+
     for command in commands:
         await client.send_json_auto_id(command)
         result = await client.receive_json()
@@ -3191,8 +3138,10 @@ async def test_config_retention_days_logic(
     async_fire_time_changed(hass)
     await hass.async_block_till_done()

     assert get_backups.call_count == get_backups_calls
-    assert delete_backup.call_count == delete_calls
-    assert delete_backup.call_args_list == delete_args_list
+    for agent_id, agent in manager.backup_agents.items():
+        agent_delete_calls = delete_calls.get(agent_id, [])
+        assert agent.async_delete_backup.call_count == len(agent_delete_calls)
+        assert agent.async_delete_backup.call_args_list == agent_delete_calls
     async_fire_time_changed(hass, fire_all=True)  # flush out storage save
     await hass.async_block_till_done()
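The manager-level delete mock is gone; the rewritten tests patch each agent's async_delete_backup with its own AsyncMock (optionally primed with a side effect) and compare recorded calls per agent. The pattern in isolation, with hypothetical agent ids and a plain RuntimeError standing in for BackupAgentError:

```python
import asyncio
from unittest.mock import AsyncMock, call

side_effects = {"test.test-agent": RuntimeError("Boom!")}
expected = {
    "test.test-agent": [call("backup-1")],
    "test.test-agent2": [call("backup-1")],
}
mocks = {
    agent_id: AsyncMock(side_effect=side_effects.get(agent_id))
    for agent_id in ("test.test-agent", "test.test-agent2")
}

async def delete_everywhere(backup_id: str) -> None:
    # return_exceptions mirrors the manager: one failing agent doesn't stop the rest.
    await asyncio.gather(
        *(mock(backup_id) for mock in mocks.values()), return_exceptions=True
    )

asyncio.run(delete_everywhere("backup-1"))

# Expectations are keyed by agent id, exactly like the delete_calls dicts above.
for agent_id, mock in mocks.items():
    assert mock.call_args_list == expected.get(agent_id, [])
```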

View File

@@ -9,6 +9,7 @@ from hass_nabucasa import Cloud
 from hass_nabucasa.auth import CognitoAuth
 from hass_nabucasa.cloudhooks import Cloudhooks
 from hass_nabucasa.const import DEFAULT_SERVERS, DEFAULT_VALUES, STATE_CONNECTED
+from hass_nabucasa.files import Files
 from hass_nabucasa.google_report_state import GoogleReportState
 from hass_nabucasa.ice_servers import IceServers
 from hass_nabucasa.iot import CloudIoT
@@ -68,6 +69,7 @@ async def cloud_fixture() -> AsyncGenerator[MagicMock]:
         spec=CloudIoT, last_disconnect_reason=None, state=STATE_CONNECTED
     )
     mock_cloud.voice = MagicMock(spec=Voice)
+    mock_cloud.files = MagicMock(spec=Files)
     mock_cloud.started = None
     mock_cloud.ice_servers = MagicMock(
         spec=IceServers,
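Giving the mock a spec means any attribute that does not exist on the real hass_nabucasa Files class raises immediately instead of silently returning a child mock, so tests fail on typos and on API drift. A small demonstration of the behavior, with a hypothetical stand-in class:

```python
from unittest.mock import MagicMock

class Files:
    """Hypothetical stand-in for hass_nabucasa.files.Files."""

    async def upload(self, **kwargs) -> None: ...
    async def download(self, **kwargs) -> bytes: ...

files = MagicMock(spec=Files)
files.download  # fine: the real class has this attribute
try:
    files.downlaod  # typo
except AttributeError:
    print("spec=Files caught the misspelled attribute")
```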

View File

@ -1,14 +1,15 @@
"""Test the cloud backup platform.""" """Test the cloud backup platform."""
from collections.abc import AsyncGenerator, AsyncIterator, Generator from collections.abc import AsyncGenerator, Generator
from io import StringIO from io import StringIO
from typing import Any from typing import Any
from unittest.mock import Mock, PropertyMock, patch from unittest.mock import Mock, PropertyMock, patch
from aiohttp import ClientError from aiohttp import ClientError
from hass_nabucasa import CloudError from hass_nabucasa import CloudError
from hass_nabucasa.api import CloudApiNonRetryableError
from hass_nabucasa.files import FilesError
import pytest import pytest
from yarl import URL
from homeassistant.components.backup import ( from homeassistant.components.backup import (
DOMAIN as BACKUP_DOMAIN, DOMAIN as BACKUP_DOMAIN,
@ -22,11 +23,20 @@ from homeassistant.components.cloud.const import EVENT_CLOUD_EVENT
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.setup import async_setup_component from homeassistant.setup import async_setup_component
from homeassistant.util.aiohttp import MockStreamReader
from tests.test_util.aiohttp import AiohttpClientMocker from tests.test_util.aiohttp import AiohttpClientMocker
from tests.typing import ClientSessionGenerator, MagicMock, WebSocketGenerator from tests.typing import ClientSessionGenerator, MagicMock, WebSocketGenerator
class MockStreamReaderChunked(MockStreamReader):
"""Mock a stream reader with simulated chunked data."""
async def readchunk(self) -> tuple[bytes, bool]:
"""Read bytes."""
return (self._content.read(), False)
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
async def setup_integration( async def setup_integration(
hass: HomeAssistant, hass: HomeAssistant,
@ -55,49 +65,6 @@ def mock_delete_file() -> Generator[MagicMock]:
yield delete_file yield delete_file
@pytest.fixture
def mock_get_download_details() -> Generator[MagicMock]:
"""Mock list files."""
with patch(
"homeassistant.components.cloud.backup.async_files_download_details",
spec_set=True,
) as download_details:
download_details.return_value = {
"url": (
"https://blabla.cloudflarestorage.com/blabla/backup/"
"462e16810d6841228828d9dd2f9e341e.tar?X-Amz-Algorithm=blah"
),
}
yield download_details
@pytest.fixture
def mock_get_upload_details() -> Generator[MagicMock]:
"""Mock list files."""
with patch(
"homeassistant.components.cloud.backup.async_files_upload_details",
spec_set=True,
) as download_details:
download_details.return_value = {
"url": (
"https://blabla.cloudflarestorage.com/blabla/backup/"
"ea5c969e492c49df89d432a1483b8dc3.tar?X-Amz-Algorithm=blah"
),
"headers": {
"content-md5": "HOhSM3WZkpHRYGiz4YRGIQ==",
"x-amz-meta-storage-type": "backup",
"x-amz-meta-b64json": (
"eyJhZGRvbnMiOltdLCJiYWNrdXBfaWQiOiJjNDNiNWU2MCIsImRhdGUiOiIyMDI0LT"
"EyLTAzVDA0OjI1OjUwLjMyMDcwMy0wNTowMCIsImRhdGFiYXNlX2luY2x1ZGVkIjpm"
"YWxzZSwiZm9sZGVycyI6W10sImhvbWVhc3Npc3RhbnRfaW5jbHVkZWQiOnRydWUsIm"
"hvbWVhc3Npc3RhbnRfdmVyc2lvbiI6IjIwMjQuMTIuMC5kZXYwIiwibmFtZSI6ImVy"
"aWsiLCJwcm90ZWN0ZWQiOnRydWUsInNpemUiOjM1NjI0OTYwfQ=="
),
},
}
yield download_details
@pytest.fixture @pytest.fixture
def mock_list_files() -> Generator[MagicMock]: def mock_list_files() -> Generator[MagicMock]:
"""Mock list files.""" """Mock list files."""
@ -264,52 +231,30 @@ async def test_agents_download(
hass: HomeAssistant, hass: HomeAssistant,
hass_client: ClientSessionGenerator, hass_client: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker, aioclient_mock: AiohttpClientMocker,
mock_get_download_details: Mock, cloud: Mock,
) -> None: ) -> None:
"""Test agent download backup.""" """Test agent download backup."""
client = await hass_client() client = await hass_client()
backup_id = "23e64aec" backup_id = "23e64aec"
aioclient_mock.get( cloud.files.download.return_value = MockStreamReaderChunked(b"backup data")
mock_get_download_details.return_value["url"], content=b"backup data"
)
resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud") resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud")
assert resp.status == 200 assert resp.status == 200
assert await resp.content.read() == b"backup data" assert await resp.content.read() == b"backup data"
@pytest.mark.parametrize("side_effect", [ClientError, CloudError])
@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
async def test_agents_download_fail_cloud(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
mock_get_download_details: Mock,
side_effect: Exception,
) -> None:
"""Test agent download backup, when cloud user is logged in."""
client = await hass_client()
backup_id = "23e64aec"
mock_get_download_details.side_effect = side_effect
resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud")
assert resp.status == 500
content = await resp.content.read()
assert "Failed to get download details" in content.decode()
@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") @pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
async def test_agents_download_fail_get( async def test_agents_download_fail_get(
hass: HomeAssistant, hass: HomeAssistant,
hass_client: ClientSessionGenerator, hass_client: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker, cloud: Mock,
mock_get_download_details: Mock,
) -> None: ) -> None:
"""Test agent download backup, when cloud user is logged in.""" """Test agent download backup, when cloud user is logged in."""
client = await hass_client() client = await hass_client()
backup_id = "23e64aec" backup_id = "23e64aec"
aioclient_mock.get(mock_get_download_details.return_value["url"], status=500) cloud.files.download.side_effect = FilesError("Oh no :(")
resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud") resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud")
assert resp.status == 500 assert resp.status == 500
@ -336,11 +281,11 @@ async def test_agents_upload(
hass: HomeAssistant, hass: HomeAssistant,
hass_client: ClientSessionGenerator, hass_client: ClientSessionGenerator,
caplog: pytest.LogCaptureFixture, caplog: pytest.LogCaptureFixture,
aioclient_mock: AiohttpClientMocker, cloud: Mock,
mock_get_upload_details: Mock,
) -> None: ) -> None:
"""Test agent upload backup.""" """Test agent upload backup."""
client = await hass_client() client = await hass_client()
backup_data = "test"
backup_id = "test-backup" backup_id = "test-backup"
test_backup = AgentBackup( test_backup = AgentBackup(
addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], addons=[AddonInfo(name="Test", slug="test", version="1.0.0")],
@ -353,10 +298,8 @@ async def test_agents_upload(
homeassistant_version="2024.12.0", homeassistant_version="2024.12.0",
name="Test", name="Test",
protected=True, protected=True,
size=0, size=len(backup_data),
) )
aioclient_mock.put(mock_get_upload_details.return_value["url"])
with ( with (
patch( patch(
"homeassistant.components.backup.manager.BackupManager.async_get_backup", "homeassistant.components.backup.manager.BackupManager.async_get_backup",
@ -367,37 +310,34 @@ async def test_agents_upload(
), ),
patch("pathlib.Path.open") as mocked_open, patch("pathlib.Path.open") as mocked_open,
): ):
mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) mocked_open.return_value.read = Mock(side_effect=[backup_data.encode(), b""])
fetch_backup.return_value = test_backup fetch_backup.return_value = test_backup
resp = await client.post( resp = await client.post(
"/api/backup/upload?agent_id=cloud.cloud", "/api/backup/upload?agent_id=cloud.cloud",
data={"file": StringIO("test")}, data={"file": StringIO(backup_data)},
) )
assert len(aioclient_mock.mock_calls) == 1 assert len(cloud.files.upload.mock_calls) == 1
assert aioclient_mock.mock_calls[-1][0] == "PUT" metadata = cloud.files.upload.mock_calls[-1].kwargs["metadata"]
assert aioclient_mock.mock_calls[-1][1] == URL( assert metadata["backup_id"] == backup_id
mock_get_upload_details.return_value["url"]
)
assert isinstance(aioclient_mock.mock_calls[-1][2], AsyncIterator)
assert resp.status == 201 assert resp.status == 201
assert f"Uploading backup {backup_id}" in caplog.text assert f"Uploading backup {backup_id}" in caplog.text
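
The metadata assertion above is the only constraint this test places on the upload call, so the agent evidently forwards the backup's serialized metadata as a metadata= keyword. A hedged sketch of that call shape; every keyword other than metadata= is a guess, not something the test verifies:

# Sketch, not the committed implementation: only metadata= is asserted.
await self._cloud.files.upload(
    storage_type=StorageType.BACKUP,  # from hass_nabucasa.files
    open_stream=open_stream,  # async callable yielding the tar chunks
    filename=f"{backup.backup_id}.tar",  # hypothetical naming scheme
    base64md5hash=checksum,  # from calculate_b64md5, patched in a later test
    metadata=backup.as_dict(),  # carries "backup_id", as asserted above
    size=backup.size,
)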
@pytest.mark.parametrize("put_mock_kwargs", [{"status": 500}, {"exc": TimeoutError}]) @pytest.mark.parametrize("side_effect", [FilesError("Boom!"), CloudError("Boom!")])
@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") @pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
async def test_agents_upload_fail_put( async def test_agents_upload_fail(
hass: HomeAssistant, hass: HomeAssistant,
hass_client: ClientSessionGenerator, hass_client: ClientSessionGenerator,
hass_storage: dict[str, Any], hass_storage: dict[str, Any],
aioclient_mock: AiohttpClientMocker, side_effect: Exception,
mock_get_upload_details: Mock, cloud: Mock,
put_mock_kwargs: dict[str, Any],
caplog: pytest.LogCaptureFixture, caplog: pytest.LogCaptureFixture,
) -> None: ) -> None:
"""Test agent upload backup fails.""" """Test agent upload backup fails."""
client = await hass_client() client = await hass_client()
backup_data = "test"
backup_id = "test-backup" backup_id = "test-backup"
test_backup = AgentBackup( test_backup = AgentBackup(
addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], addons=[AddonInfo(name="Test", slug="test", version="1.0.0")],
@@ -410,9 +350,10 @@ async def test_agents_upload_fail_put(
homeassistant_version="2024.12.0", homeassistant_version="2024.12.0",
name="Test", name="Test",
protected=True, protected=True,
size=0, size=len(backup_data),
) )
aioclient_mock.put(mock_get_upload_details.return_value["url"], **put_mock_kwargs)
cloud.files.upload.side_effect = side_effect
with ( with (
patch( patch(
@@ -427,17 +368,17 @@ async def test_agents_upload_fail_put(
patch("homeassistant.components.cloud.backup.random.randint", return_value=60), patch("homeassistant.components.cloud.backup.random.randint", return_value=60),
patch("homeassistant.components.cloud.backup._RETRY_LIMIT", 2), patch("homeassistant.components.cloud.backup._RETRY_LIMIT", 2),
): ):
mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) mocked_open.return_value.read = Mock(side_effect=[backup_data.encode(), b""])
fetch_backup.return_value = test_backup fetch_backup.return_value = test_backup
resp = await client.post( resp = await client.post(
"/api/backup/upload?agent_id=cloud.cloud", "/api/backup/upload?agent_id=cloud.cloud",
data={"file": StringIO("test")}, data={"file": StringIO(backup_data)},
) )
await hass.async_block_till_done() await hass.async_block_till_done()
assert len(aioclient_mock.mock_calls) == 2
assert "Failed to upload backup, retrying (2/2) in 60s" in caplog.text assert "Failed to upload backup, retrying (2/2) in 60s" in caplog.text
assert resp.status == 201 assert resp.status == 201
assert cloud.files.upload.call_count == 2
store_backups = hass_storage[BACKUP_DOMAIN]["data"]["backups"] store_backups = hass_storage[BACKUP_DOMAIN]["data"]["backups"]
assert len(store_backups) == 1 assert len(store_backups) == 1
stored_backup = store_backups[0] stored_backup = store_backups[0]
@@ -445,19 +386,33 @@ async def test_agents_upload_fail_put(
assert stored_backup["failed_agent_ids"] == ["cloud.cloud"] assert stored_backup["failed_agent_ids"] == ["cloud.cloud"]
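
Two upload attempts, one logged retry, and a 201 whose stored backup carries failed_agent_ids: that behavior is consistent with a bounded retry loop over the names this test patches (random.randint, _RETRY_LIMIT). A minimal sketch under that assumption, not the committed implementation:

import asyncio
import random
from collections.abc import Callable, Coroutine
from typing import Any

# Sketch: reconstructs the retry loop implied by the patches and by the
# asserted log line "Failed to upload backup, retrying (2/2) in 60s".
# _RETRY_LIMIT/_RETRY_SECONDS_* are the cloud.backup module constants.
async def _upload_with_retry(
    do_upload: Callable[[], Coroutine[Any, Any, None]],
) -> None:
    for attempt in range(1, _RETRY_LIMIT + 1):
        try:
            await do_upload()
        except (FilesError, CloudError) as err:
            if attempt == _RETRY_LIMIT:
                raise BackupAgentError("Failed to upload backup") from err
            delay = random.randint(_RETRY_SECONDS_MIN, _RETRY_SECONDS_MAX)
            _LOGGER.info(
                "Failed to upload backup, retrying (%s/%s) in %ss",
                attempt + 1,
                _RETRY_LIMIT,
                delay,
            )
            await asyncio.sleep(delay)
        else:
            return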
@pytest.mark.parametrize("side_effect", [ClientError, CloudError]) @pytest.mark.parametrize(
@pytest.mark.usefixtures("cloud_logged_in") ("side_effect", "logmsg"),
async def test_agents_upload_fail_cloud( [
(
CloudApiNonRetryableError("Boom!", code="NC-SH-FH-03"),
"The backup size of 13.37GB is too large to be uploaded to Home Assistant Cloud",
),
(
CloudApiNonRetryableError("Boom!", code="NC-CE-01"),
"Failed to upload backup Boom!",
),
],
)
@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
async def test_agents_upload_fail_non_retryable(
hass: HomeAssistant, hass: HomeAssistant,
hass_client: ClientSessionGenerator, hass_client: ClientSessionGenerator,
hass_storage: dict[str, Any], hass_storage: dict[str, Any],
mock_get_upload_details: Mock,
side_effect: Exception, side_effect: Exception,
logmsg: str,
cloud: Mock,
caplog: pytest.LogCaptureFixture,
) -> None: ) -> None:
"""Test agent upload backup, when cloud user is logged in.""" """Test agent upload backup fails with non-retryable error."""
client = await hass_client() client = await hass_client()
backup_data = "test"
backup_id = "test-backup" backup_id = "test-backup"
mock_get_upload_details.side_effect = side_effect
test_backup = AgentBackup( test_backup = AgentBackup(
addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], addons=[AddonInfo(name="Test", slug="test", version="1.0.0")],
backup_id=backup_id, backup_id=backup_id,
@@ -469,8 +424,11 @@ async def test_agents_upload_fail_cloud(
homeassistant_version="2024.12.0", homeassistant_version="2024.12.0",
name="Test", name="Test",
protected=True, protected=True,
size=0, size=14358124749,
) )
cloud.files.upload.side_effect = side_effect
with ( with (
patch( patch(
"homeassistant.components.backup.manager.BackupManager.async_get_backup", "homeassistant.components.backup.manager.BackupManager.async_get_backup",
@@ -480,17 +438,19 @@ async def test_agents_upload_fail_cloud(
return_value=test_backup, return_value=test_backup,
), ),
patch("pathlib.Path.open") as mocked_open, patch("pathlib.Path.open") as mocked_open,
patch("homeassistant.components.cloud.backup.asyncio.sleep"), patch("homeassistant.components.cloud.backup.calculate_b64md5"),
): ):
mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) mocked_open.return_value.read = Mock(side_effect=[backup_data.encode(), b""])
fetch_backup.return_value = test_backup fetch_backup.return_value = test_backup
resp = await client.post( resp = await client.post(
"/api/backup/upload?agent_id=cloud.cloud", "/api/backup/upload?agent_id=cloud.cloud",
data={"file": StringIO("test")}, data={"file": StringIO(backup_data)},
) )
await hass.async_block_till_done() await hass.async_block_till_done()
assert logmsg in caplog.text
assert resp.status == 201 assert resp.status == 201
assert cloud.files.upload.call_count == 1
store_backups = hass_storage[BACKUP_DOMAIN]["data"]["backups"] store_backups = hass_storage[BACKUP_DOMAIN]["data"]["backups"]
assert len(store_backups) == 1 assert len(store_backups) == 1
stored_backup = store_backups[0] stored_backup = store_backups[0]
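
The two parametrized cases fix how CloudApiNonRetryableError is reported: the file-too-large code NC-SH-FH-03 earns a human-readable size message, anything else a generic failure, and neither is retried (upload.call_count == 1). A sketch of that branch; the GiB conversion reproduces the asserted 13.37GB for size=14358124749 but is inferred from the log text, not copied from the source:

# Sketch only; `backup` stands for the AgentBackup being uploaded.
try:
    await self._cloud.files.upload(...)  # stands in for the real call
except CloudApiNonRetryableError as err:
    if err.code == "NC-SH-FH-03":
        raise BackupAgentError(
            f"The backup size of {backup.size / 2**30:.2f}GB is too large "
            "to be uploaded to Home Assistant Cloud"
        ) from err
    raise BackupAgentError(f"Failed to upload backup {err}") from err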
@@ -505,6 +465,7 @@ async def test_agents_upload_not_protected(
) -> None: ) -> None:
"""Test agent upload backup, when cloud user is logged in.""" """Test agent upload backup, when cloud user is logged in."""
client = await hass_client() client = await hass_client()
backup_data = "test"
backup_id = "test-backup" backup_id = "test-backup"
test_backup = AgentBackup( test_backup = AgentBackup(
addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], addons=[AddonInfo(name="Test", slug="test", version="1.0.0")],
@@ -517,7 +478,7 @@ async def test_agents_upload_not_protected(
homeassistant_version="2024.12.0", homeassistant_version="2024.12.0",
name="Test", name="Test",
protected=False, protected=False,
size=0, size=len(backup_data),
) )
with ( with (
patch("pathlib.Path.open"), patch("pathlib.Path.open"),
@@ -528,7 +489,7 @@ async def test_agents_upload_not_protected(
): ):
resp = await client.post( resp = await client.post(
"/api/backup/upload?agent_id=cloud.cloud", "/api/backup/upload?agent_id=cloud.cloud",
data={"file": StringIO("test")}, data={"file": StringIO(backup_data)},
) )
await hass.async_block_till_done() await hass.async_block_till_done()
@@ -540,6 +501,53 @@ async def test_agents_upload_not_protected(
assert stored_backup["failed_agent_ids"] == ["cloud.cloud"] assert stored_backup["failed_agent_ids"] == ["cloud.cloud"]
@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
async def test_agents_upload_wrong_size(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
caplog: pytest.LogCaptureFixture,
cloud: Mock,
) -> None:
"""Test agent upload backup with the wrong size."""
client = await hass_client()
backup_data = "test"
backup_id = "test-backup"
test_backup = AgentBackup(
addons=[AddonInfo(name="Test", slug="test", version="1.0.0")],
backup_id=backup_id,
database_included=True,
date="1970-01-01T00:00:00.000Z",
extra_metadata={},
folders=[Folder.MEDIA, Folder.SHARE],
homeassistant_included=True,
homeassistant_version="2024.12.0",
name="Test",
protected=True,
size=len(backup_data) - 1,
)
with (
patch(
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
) as fetch_backup,
patch(
"homeassistant.components.backup.manager.read_backup",
return_value=test_backup,
),
patch("pathlib.Path.open") as mocked_open,
):
mocked_open.return_value.read = Mock(side_effect=[backup_data.encode(), b""])
fetch_backup.return_value = test_backup
resp = await client.post(
"/api/backup/upload?agent_id=cloud.cloud",
data={"file": StringIO(backup_data)},
)
assert len(cloud.files.upload.mock_calls) == 0
assert resp.status == 201
assert "Upload failed for cloud.cloud" in caplog.text
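
This test pins down a pre-flight size check: the declared backup size disagrees with the streamed bytes, no cloud call is ever made, and the failure still surfaces per-agent in the log. One plausible shape for that check, assuming it happens while computing the checksum; whether it lives in calculate_b64md5 or in the agent itself is not visible from the test:

import base64
import hashlib
from collections.abc import AsyncIterator

# Hypothetical pre-flight pass: consume the stream once to hash and count
# bytes, refusing to upload when the count disagrees with the metadata.
async def _checksum_and_verify(
    stream: AsyncIterator[bytes], expected_size: int
) -> str:
    file_hash = hashlib.md5()
    size = 0
    async for chunk in stream:
        file_hash.update(chunk)
        size += len(chunk)
    if size != expected_size:
        raise BackupAgentError("Size mismatch, refusing upload")  # wording assumed
    return base64.b64encode(file_hash.digest()).decode()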
@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") @pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
async def test_agents_delete( async def test_agents_delete(
hass: HomeAssistant, hass: HomeAssistant,
View File
@@ -93,7 +93,8 @@
"reserved_soc": 15.0, "reserved_soc": 15.0,
"very_low_soc": 5, "very_low_soc": 5,
"charge_from_grid": true, "charge_from_grid": true,
"date": "1695598084" "date": "1695598084",
"opt_schedules": true
}, },
"single_rate": { "single_rate": {
"rate": 0.0, "rate": 0.0,
View File
@@ -235,7 +235,8 @@
"reserved_soc": 0.0, "reserved_soc": 0.0,
"very_low_soc": 5, "very_low_soc": 5,
"charge_from_grid": true, "charge_from_grid": true,
"date": "1714749724" "date": "1714749724",
"opt_schedules": true
}, },
"single_rate": { "single_rate": {
"rate": 0.0, "rate": 0.0,
View File
@@ -223,7 +223,8 @@
"reserved_soc": 0.0, "reserved_soc": 0.0,
"very_low_soc": 5, "very_low_soc": 5,
"charge_from_grid": true, "charge_from_grid": true,
"date": "1714749724" "date": "1714749724",
"opt_schedules": true
}, },
"single_rate": { "single_rate": {
"rate": 0.0, "rate": 0.0,
View File
@@ -427,7 +427,8 @@
"reserved_soc": 15.0, "reserved_soc": 15.0,
"very_low_soc": 5, "very_low_soc": 5,
"charge_from_grid": true, "charge_from_grid": true,
"date": "1695598084" "date": "1695598084",
"opt_schedules": true
}, },
"single_rate": { "single_rate": {
"rate": 0.0, "rate": 0.0,
View File
@@ -242,7 +242,8 @@
"reserved_soc": 15.0, "reserved_soc": 15.0,
"very_low_soc": 5, "very_low_soc": 5,
"charge_from_grid": true, "charge_from_grid": true,
"date": "1695598084" "date": "1695598084",
"opt_schedules": true
}, },
"single_rate": { "single_rate": {
"rate": 0.0, "rate": 0.0,
View File
@@ -88,7 +88,8 @@
"reserved_soc": 15.0, "reserved_soc": 15.0,
"very_low_soc": 5, "very_low_soc": 5,
"charge_from_grid": true, "charge_from_grid": true,
"date": "1695598084" "date": "1695598084",
"opt_schedules": true
}, },
"single_rate": { "single_rate": {
"rate": 0.0, "rate": 0.0,
View File
@@ -9,10 +9,12 @@ relevant modes.
""" """
from collections.abc import Generator from collections.abc import Generator
import datetime
from http import HTTPStatus from http import HTTPStatus
import logging import logging
from unittest.mock import AsyncMock, patch from unittest.mock import AsyncMock, patch
import aiohttp
from google_nest_sdm.exceptions import ( from google_nest_sdm.exceptions import (
ApiException, ApiException,
AuthException, AuthException,
@@ -22,6 +24,7 @@ from google_nest_sdm.exceptions import (
import pytest import pytest
from homeassistant.components.nest import DOMAIN from homeassistant.components.nest import DOMAIN
from homeassistant.components.nest.const import OAUTH2_TOKEN
from homeassistant.config_entries import ConfigEntryState from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
@@ -36,6 +39,8 @@ from tests.test_util.aiohttp import AiohttpClientMocker
PLATFORM = "sensor" PLATFORM = "sensor"
EXPIRED_TOKEN_TIMESTAMP = datetime.datetime(2022, 4, 8).timestamp()
@pytest.fixture @pytest.fixture
def platforms() -> list[str]: def platforms() -> list[str]:
@@ -139,6 +144,55 @@ async def test_setup_device_manager_failure(
assert entries[0].state is ConfigEntryState.SETUP_RETRY assert entries[0].state is ConfigEntryState.SETUP_RETRY
@pytest.mark.parametrize("token_expiration_time", [EXPIRED_TOKEN_TIMESTAMP])
@pytest.mark.parametrize(
("token_response_args", "expected_state", "expected_steps"),
[
# Cases that retry integration setup
(
{"status": HTTPStatus.INTERNAL_SERVER_ERROR},
ConfigEntryState.SETUP_RETRY,
[],
),
({"exc": aiohttp.ClientError("No internet")}, ConfigEntryState.SETUP_RETRY, []),
# Cases that require the user to reauthenticate in a config flow
(
{"status": HTTPStatus.BAD_REQUEST},
ConfigEntryState.SETUP_ERROR,
["reauth_confirm"],
),
(
{"status": HTTPStatus.FORBIDDEN},
ConfigEntryState.SETUP_ERROR,
["reauth_confirm"],
),
],
)
async def test_expired_token_refresh_error(
hass: HomeAssistant,
setup_base_platform: PlatformSetup,
aioclient_mock: AiohttpClientMocker,
token_response_args: dict,
expected_state: ConfigEntryState,
expected_steps: list[str],
) -> None:
"""Test errors when attempting to refresh the auth token."""
aioclient_mock.post(
OAUTH2_TOKEN,
**token_response_args,
)
await setup_base_platform()
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
assert entries[0].state is expected_state
flows = hass.config_entries.flow.async_progress()
assert expected_steps == [flow["step_id"] for flow in flows]
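
The four cases map token-endpoint failures onto Home Assistant's two standard setup outcomes: transient trouble (a 500 or a dropped connection) retries setup, while a 400/403 means the credentials are dead and a reauth_confirm flow must start. A minimal sketch of that mapping using the stock config-entry exceptions; the actual nest auth code may differ:

import aiohttp

from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady

async def _async_refresh_token(session) -> None:
    """Refresh the OAuth token, mapping failures to setup outcomes."""
    try:
        await session.async_ensure_token_valid()
    except aiohttp.ClientResponseError as err:
        if 400 <= err.status < 500:
            # BAD_REQUEST / FORBIDDEN above: SETUP_ERROR plus reauth_confirm
            raise ConfigEntryAuthFailed from err
        # INTERNAL_SERVER_ERROR above: SETUP_RETRY
        raise ConfigEntryNotReady from err
    except aiohttp.ClientError as err:
        # "No internet" above: SETUP_RETRY
        raise ConfigEntryNotReady from err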
@pytest.mark.parametrize("subscriber_side_effect", [AuthException()]) @pytest.mark.parametrize("subscriber_side_effect", [AuthException()])
async def test_subscriber_auth_failure( async def test_subscriber_auth_failure(
hass: HomeAssistant, hass: HomeAssistant,