Franck Nijhof 2025-02-21 22:30:20 +01:00 committed by GitHub
commit cc792403ab
60 changed files with 1600 additions and 192 deletions

View File

@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/airgradient",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["airgradient==0.9.1"],
"requirements": ["airgradient==0.9.2"],
"zeroconf": ["_airgradient._tcp.local."]
}

View File

@ -16,6 +16,7 @@ from .agent import (
BackupAgentPlatformProtocol,
LocalBackupAgent,
)
from .config import BackupConfig, CreateBackupParametersDict
from .const import DATA_MANAGER, DOMAIN
from .http import async_register_http_views
from .manager import (
@ -47,12 +48,14 @@ __all__ = [
"BackupAgent",
"BackupAgentError",
"BackupAgentPlatformProtocol",
"BackupConfig",
"BackupManagerError",
"BackupNotFound",
"BackupPlatformProtocol",
"BackupReaderWriter",
"BackupReaderWriterError",
"CreateBackupEvent",
"CreateBackupParametersDict",
"CreateBackupStage",
"CreateBackupState",
"Folder",

View File

@ -154,7 +154,8 @@ class BackupConfig:
self.data.retention.apply(self._manager)
self.data.schedule.apply(self._manager)
async def update(
@callback
def update(
self,
*,
agents: dict[str, AgentParametersDict] | UndefinedType = UNDEFINED,

View File

@ -43,7 +43,11 @@ from .agent import (
BackupAgentPlatformProtocol,
LocalBackupAgent,
)
from .config import BackupConfig, delete_backups_exceeding_configured_count
from .config import (
BackupConfig,
CreateBackupParametersDict,
delete_backups_exceeding_configured_count,
)
from .const import (
BUF_SIZE,
DATA_MANAGER,
@ -282,6 +286,10 @@ class BackupReaderWriter(abc.ABC):
) -> None:
"""Get restore events after core restart."""
@abc.abstractmethod
async def async_validate_config(self, *, config: BackupConfig) -> None:
"""Validate backup config."""
class IncorrectPasswordError(BackupReaderWriterError):
"""Raised when the password is incorrect."""
@ -333,6 +341,7 @@ class BackupManager:
self.config.load(stored["config"])
self.known_backups.load(stored["backups"])
await self._reader_writer.async_validate_config(config=self.config)
await self._reader_writer.async_resume_restore_progress_after_restart(
on_progress=self.async_on_backup_event
)
@ -1832,6 +1841,44 @@ class CoreBackupReaderWriter(BackupReaderWriter):
)
on_progress(IdleEvent())
async def async_validate_config(self, *, config: BackupConfig) -> None:
"""Validate backup config.
Update automatic backup settings to not include addons or folders and remove
hassio agents in case a backup created by supervisor was restored.
"""
create_backup = config.data.create_backup
if (
not create_backup.include_addons
and not create_backup.include_all_addons
and not create_backup.include_folders
and not any(a_id.startswith("hassio.") for a_id in create_backup.agent_ids)
):
LOGGER.debug("Backup settings don't need to be adjusted")
return
LOGGER.info(
"Adjusting backup settings to not include addons, folders or supervisor locations"
)
automatic_agents = [
agent_id
for agent_id in create_backup.agent_ids
if not agent_id.startswith("hassio.")
]
if (
self._local_agent_id not in automatic_agents
and "hassio.local" in create_backup.agent_ids
):
automatic_agents = [self._local_agent_id, *automatic_agents]
config.update(
create_backup=CreateBackupParametersDict(
agent_ids=automatic_agents,
include_addons=None,
include_all_addons=False,
include_folders=None,
)
)
def _generate_backup_id(date: str, name: str) -> str:
"""Generate a backup ID."""

View File

@ -16,7 +16,7 @@ if TYPE_CHECKING:
STORE_DELAY_SAVE = 30
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 3
STORAGE_VERSION_MINOR = 4
class StoredBackupData(TypedDict):
@ -60,6 +60,13 @@ class _BackupStore(Store[StoredBackupData]):
else:
data["config"]["schedule"]["days"] = [state]
data["config"]["schedule"]["recurrence"] = "custom_days"
if old_minor_version < 4:
# Workaround for a bug in frontend which incorrectly set days to 0
# instead of to None for unlimited retention.
if data["config"]["retention"]["copies"] == 0:
data["config"]["retention"]["copies"] = None
if data["config"]["retention"]["days"] == 0:
data["config"]["retention"]["days"] = None
# Note: We allow reading data with major version 2.
# Reject if major version is higher than 2.
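
A hedged sketch of the minor-version-4 step in isolation, assuming the stored dict shape shown in the hunk; 0 was written by the frontend bug and is normalized to None, which the backend uses for unlimited retention.

def migrate_retention_to_v4(retention: dict[str, int | None]) -> dict[str, int | None]:
    # 0 is not a valid retention setting; treat it as "unlimited" (None).
    for key in ("copies", "days"):
        if retention.get(key) == 0:
            retention[key] = None
    return retention

assert migrate_retention_to_v4({"copies": 0, "days": 0}) == {"copies": None, "days": None}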

View File

@ -104,12 +104,15 @@ def read_backup(backup_path: Path) -> AgentBackup:
bool, homeassistant.get("exclude_database", False)
)
extra_metadata = cast(dict[str, bool | str], data.get("extra", {}))
date = extra_metadata.get("supervisor.backup_request_date", data["date"])
return AgentBackup(
addons=addons,
backup_id=cast(str, data["slug"]),
database_included=database_included,
date=cast(str, data["date"]),
extra_metadata=cast(dict[str, bool | str], data.get("extra", {})),
date=cast(str, date),
extra_metadata=extra_metadata,
folders=folders,
homeassistant_included=homeassistant_included,
homeassistant_version=homeassistant_version,
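
The date-selection rule above in miniature; the sample values follow the test fixture added later in this commit.

data = {
    "date": "2024-12-01T00:00:00.000000-00:00",
    "extra": {"supervisor.backup_request_date": "2025-12-01T00:00:00.000000-00:00"},
}
extra_metadata = data.get("extra", {})
# Prefer the time the backup was requested from the supervisor, if recorded.
date = extra_metadata.get("supervisor.backup_request_date", data["date"])
assert date == "2025-12-01T00:00:00.000000-00:00"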

View File

@ -346,6 +346,7 @@ async def handle_config_info(
)
@callback
@websocket_api.require_admin
@websocket_api.websocket_command(
{
@ -368,8 +369,10 @@ async def handle_config_info(
),
vol.Optional("retention"): vol.Schema(
{
vol.Optional("copies"): vol.Any(int, None),
vol.Optional("days"): vol.Any(int, None),
# Note: We can't use cv.positive_int because it allows 0 even
# though 0 is not positive.
vol.Optional("copies"): vol.Any(vol.All(int, vol.Range(min=1)), None),
vol.Optional("days"): vol.Any(vol.All(int, vol.Range(min=1)), None),
},
),
vol.Optional("schedule"): vol.Schema(
@ -385,8 +388,7 @@ async def handle_config_info(
),
}
)
@websocket_api.async_response
async def handle_config_update(
def handle_config_update(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
@ -396,7 +398,7 @@ async def handle_config_update(
changes = dict(msg)
changes.pop("id")
changes.pop("type")
await manager.config.update(**changes)
manager.config.update(**changes)
connection.send_result(msg["id"])

View File

@ -11,7 +11,11 @@ from typing import Any
from aiohttp import ClientError
from hass_nabucasa import Cloud, CloudError
from hass_nabucasa.api import CloudApiNonRetryableError
from hass_nabucasa.cloud_api import async_files_delete_file, async_files_list
from hass_nabucasa.cloud_api import (
FilesHandlerListEntry,
async_files_delete_file,
async_files_list,
)
from hass_nabucasa.files import FilesError, StorageType, calculate_b64md5
from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
@ -76,11 +80,6 @@ class CloudBackupAgent(BackupAgent):
self._cloud = cloud
self._hass = hass
@callback
def _get_backup_filename(self) -> str:
"""Return the backup filename."""
return f"{self._cloud.client.prefs.instance_id}.tar"
async def async_download_backup(
self,
backup_id: str,
@ -91,13 +90,13 @@ class CloudBackupAgent(BackupAgent):
:param backup_id: The ID of the backup that was returned in async_list_backups.
:return: An async iterator that yields bytes.
"""
if not await self.async_get_backup(backup_id):
if not (backup := await self._async_get_backup(backup_id)):
raise BackupAgentError("Backup not found")
try:
content = await self._cloud.files.download(
storage_type=StorageType.BACKUP,
filename=self._get_backup_filename(),
filename=backup["Key"],
)
except CloudError as err:
raise BackupAgentError(f"Failed to download backup: {err}") from err
@ -124,7 +123,7 @@ class CloudBackupAgent(BackupAgent):
base64md5hash = await calculate_b64md5(open_stream, size)
except FilesError as err:
raise BackupAgentError(err) from err
filename = self._get_backup_filename()
filename = f"{self._cloud.client.prefs.instance_id}.tar"
metadata = backup.as_dict()
tries = 1
@ -172,29 +171,34 @@ class CloudBackupAgent(BackupAgent):
:param backup_id: The ID of the backup that was returned in async_list_backups.
"""
if not await self.async_get_backup(backup_id):
if not (backup := await self._async_get_backup(backup_id)):
return
try:
await async_files_delete_file(
self._cloud,
storage_type=StorageType.BACKUP,
filename=self._get_backup_filename(),
filename=backup["Key"],
)
except (ClientError, CloudError) as err:
raise BackupAgentError("Failed to delete backup") from err
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""
backups = await self._async_list_backups()
return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]
async def _async_list_backups(self) -> list[FilesHandlerListEntry]:
"""List backups."""
try:
backups = await async_files_list(
self._cloud, storage_type=StorageType.BACKUP
)
_LOGGER.debug("Cloud backups: %s", backups)
except (ClientError, CloudError) as err:
raise BackupAgentError("Failed to list backups") from err
return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]
_LOGGER.debug("Cloud backups: %s", backups)
return backups
async def async_get_backup(
self,
@ -202,10 +206,19 @@ class CloudBackupAgent(BackupAgent):
**kwargs: Any,
) -> AgentBackup | None:
"""Return a backup."""
backups = await self.async_list_backups()
if not (backup := await self._async_get_backup(backup_id)):
return None
return AgentBackup.from_dict(backup["Metadata"])
async def _async_get_backup(
self,
backup_id: str,
) -> FilesHandlerListEntry | None:
"""Return a backup."""
backups = await self._async_list_backups()
for backup in backups:
if backup.backup_id == backup_id:
if backup["Metadata"]["backup_id"] == backup_id:
return backup
return None
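
In outline, the refactor replaces a single fixed filename with a lookup of the stored object by its metadata backup_id, so downloads and deletes reuse that object's "Key". A hypothetical, dependency-free version of the lookup (sample values follow the test fixtures below):

def find_file_entry(files: list[dict], backup_id: str) -> dict | None:
    # Match on the backup_id recorded in the object's metadata.
    for entry in files:
        if entry["Metadata"]["backup_id"] == backup_id:
            return entry
    return None

files = [
    {"Key": "462e16810d6841228828d9dd2f9e341e.tar", "Metadata": {"backup_id": "23e64aed"}}
]
entry = find_file_entry(files, "23e64aed")
assert entry is not None and entry["Key"].endswith(".tar")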

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.10", "deebot-client==12.1.0"]
"requirements": ["py-sucks==0.9.10", "deebot-client==12.2.0"]
}

View File

@ -85,6 +85,8 @@ async def async_setup_entry(
class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity):
"""The thermostat class for FRITZ!SmartHome thermostats."""
_attr_max_temp = MAX_TEMPERATURE
_attr_min_temp = MIN_TEMPERATURE
_attr_precision = PRECISION_HALVES
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_translation_key = "thermostat"
@ -135,11 +137,13 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity):
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
target_temp = kwargs.get(ATTR_TEMPERATURE)
hvac_mode = kwargs.get(ATTR_HVAC_MODE)
if hvac_mode == HVACMode.OFF:
if (hvac_mode := kwargs.get(ATTR_HVAC_MODE)) is HVACMode.OFF:
await self.async_set_hvac_mode(hvac_mode)
elif target_temp is not None:
elif (target_temp := kwargs.get(ATTR_TEMPERATURE)) is not None:
if target_temp == OFF_API_TEMPERATURE:
target_temp = OFF_REPORT_SET_TEMPERATURE
elif target_temp == ON_API_TEMPERATURE:
target_temp = ON_REPORT_SET_TEMPERATURE
await self.hass.async_add_executor_job(
self.data.set_target_temperature, target_temp, True
)
@ -169,12 +173,12 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity):
translation_domain=DOMAIN,
translation_key="change_hvac_while_active_mode",
)
if self.hvac_mode == hvac_mode:
if self.hvac_mode is hvac_mode:
LOGGER.debug(
"%s is already in requested hvac mode %s", self.name, hvac_mode
)
return
if hvac_mode == HVACMode.OFF:
if hvac_mode is HVACMode.OFF:
await self.async_set_temperature(temperature=OFF_REPORT_SET_TEMPERATURE)
else:
if value_scheduled_preset(self.data) == PRESET_ECO:
@ -208,16 +212,6 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity):
elif preset_mode == PRESET_ECO:
await self.async_set_temperature(temperature=self.data.eco_temperature)
@property
def min_temp(self) -> int:
"""Return the minimum temperature."""
return MIN_TEMPERATURE
@property
def max_temp(self) -> int:
"""Return the maximum temperature."""
return MAX_TEMPERATURE
@property
def extra_state_attributes(self) -> ClimateExtraAttributes:
"""Return the device specific state attributes."""

View File

@ -7,7 +7,7 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pyfritzhome"],
"requirements": ["pyfritzhome==0.6.14"],
"requirements": ["pyfritzhome==0.6.15"],
"ssdp": [
{
"st": "urn:schemas-upnp-org:device:fritzbox:1"

View File

@ -21,5 +21,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250214.0"]
"requirements": ["home-assistant-frontend==20250221.0"]
}

View File

@ -27,11 +27,13 @@ from homeassistant.components.backup import (
AddonInfo,
AgentBackup,
BackupAgent,
BackupConfig,
BackupManagerError,
BackupNotFound,
BackupReaderWriter,
BackupReaderWriterError,
CreateBackupEvent,
CreateBackupParametersDict,
CreateBackupStage,
CreateBackupState,
Folder,
@ -633,6 +635,27 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
_LOGGER.debug("Could not get restore job %s: %s", restore_job_id, err)
unsub()
async def async_validate_config(self, *, config: BackupConfig) -> None:
"""Validate backup config.
Replace the core backup agent with the hassio default agent.
"""
core_agent_id = "backup.local"
create_backup = config.data.create_backup
if core_agent_id not in create_backup.agent_ids:
_LOGGER.debug("Backup settings don't need to be adjusted")
return
default_agent = await _default_agent(self._client)
_LOGGER.info("Adjusting backup settings to not include core backup location")
automatic_agents = [
agent_id if agent_id != core_agent_id else default_agent
for agent_id in create_backup.agent_ids
]
config.update(
create_backup=CreateBackupParametersDict(agent_ids=automatic_agents)
)
@callback
def _async_listen_job_events(
self, job_id: UUID, on_event: Callable[[Mapping[str, Any]], None]

View File

@ -9,5 +9,5 @@
},
"iot_class": "cloud_polling",
"loggers": ["apyhiveapi"],
"requirements": ["pyhive-integration==1.0.1"]
"requirements": ["pyhive-integration==1.0.2"]
}

View File

@ -432,6 +432,7 @@ def ws_expose_entity(
@websocket_api.websocket_command(
{
vol.Required("type"): "homeassistant/expose_entity/list",
vol.Optional("assistant"): vol.In(KNOWN_ASSISTANTS),
}
)
def ws_list_exposed_entities(
@ -441,10 +442,18 @@ def ws_list_exposed_entities(
result: dict[str, Any] = {}
exposed_entities = hass.data[DATA_EXPOSED_ENTITIES]
required_assistant = msg.get("assistant")
entity_registry = er.async_get(hass)
for entity_id in chain(exposed_entities.entities, entity_registry.entities):
result[entity_id] = {}
entity_settings = async_get_entity_settings(hass, entity_id)
if required_assistant and (
(required_assistant not in entity_settings)
or (not entity_settings[required_assistant].get("should_expose"))
):
# Not exposed to required assistant
continue
result[entity_id] = {}
for assistant, settings in entity_settings.items():
if "should_expose" not in settings:
continue
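
The new assistant filter reduces to a small predicate; a hedged, self-contained rendering (the function name is illustrative):

def exposed_to(entity_settings: dict[str, dict], assistant: str | None) -> bool:
    # Without a requested assistant, every entity is listed.
    if not assistant:
        return True
    # Otherwise the entity must have should_expose set truthy for it.
    return bool(entity_settings.get(assistant, {}).get("should_expose"))

assert exposed_to({"conversation": {"should_expose": True}}, "conversation")
assert not exposed_to({"conversation": {}}, "conversation")
assert exposed_to({}, None)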

View File

@ -107,7 +107,9 @@ class HueLight(HueBaseEntity, LightEntity):
self._attr_effect_list = []
if effects := resource.effects:
self._attr_effect_list = [
x.value for x in effects.status_values if x != EffectStatus.NO_EFFECT
x.value
for x in effects.status_values
if x not in (EffectStatus.NO_EFFECT, EffectStatus.UNKNOWN)
]
if timed_effects := resource.timed_effects:
self._attr_effect_list += [

View File

@ -39,7 +39,7 @@ set_preset_mode_with_end_datetime:
select:
options:
- "away"
- "Frost Guard"
- "frost_guard"
end_datetime:
required: true
example: '"2019-04-20 05:04:20"'

View File

@ -235,7 +235,7 @@ class ONVIFDevice:
LOGGER.debug("%s: Retrieving current device date/time", self.name)
try:
device_time = await device_mgmt.GetSystemDateAndTime()
except RequestError as err:
except (RequestError, Fault) as err:
LOGGER.warning(
"Couldn't get device '%s' date/time. Error: %s", self.name, err
)

View File

@ -4,6 +4,7 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import date
from opower import Forecast, MeterType, UnitOfMeasure
@ -29,7 +30,7 @@ from .coordinator import OpowerCoordinator
class OpowerEntityDescription(SensorEntityDescription):
"""Class describing Opower sensors entities."""
value_fn: Callable[[Forecast], str | float]
value_fn: Callable[[Forecast], str | float | date]
# suggested_display_precision=0 for all sensors since
@ -97,7 +98,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
device_class=SensorDeviceClass.DATE,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=lambda data: str(data.start_date),
value_fn=lambda data: data.start_date,
),
OpowerEntityDescription(
key="elec_end_date",
@ -105,7 +106,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
device_class=SensorDeviceClass.DATE,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=lambda data: str(data.end_date),
value_fn=lambda data: data.end_date,
),
)
GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
@ -169,7 +170,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
device_class=SensorDeviceClass.DATE,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=lambda data: str(data.start_date),
value_fn=lambda data: data.start_date,
),
OpowerEntityDescription(
key="gas_end_date",
@ -177,7 +178,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
device_class=SensorDeviceClass.DATE,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=lambda data: str(data.end_date),
value_fn=lambda data: data.end_date,
),
)
@ -247,7 +248,7 @@ class OpowerSensor(CoordinatorEntity[OpowerCoordinator], SensorEntity):
self.utility_account_id = utility_account_id
@property
def native_value(self) -> StateType:
def native_value(self) -> StateType | date:
"""Return the state."""
if self.coordinator.data is not None:
return self.entity_description.value_fn(

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/prosegur",
"iot_class": "cloud_polling",
"loggers": ["pyprosegur"],
"requirements": ["pyprosegur==0.0.9"]
"requirements": ["pyprosegur==0.0.13"]
}

View File

@ -103,10 +103,10 @@ class ReolinkHostCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinator[None]
"""Handle incoming TCP push event."""
self.async_write_ha_state()
def register_callback(self, unique_id: str, cmd_id: int) -> None:
def register_callback(self, callback_id: str, cmd_id: int) -> None:
"""Register callback for TCP push events."""
self._host.api.baichuan.register_callback( # pragma: no cover
unique_id, self._push_callback, cmd_id
callback_id, self._push_callback, cmd_id
)
async def async_added_to_hass(self) -> None:
@ -114,23 +114,25 @@ class ReolinkHostCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinator[None]
await super().async_added_to_hass()
cmd_key = self.entity_description.cmd_key
cmd_id = self.entity_description.cmd_id
callback_id = f"{self.platform.domain}_{self._attr_unique_id}"
if cmd_key is not None:
self._host.async_register_update_cmd(cmd_key)
if cmd_id is not None:
self.register_callback(self._attr_unique_id, cmd_id)
self.register_callback(callback_id, cmd_id)
# Privacy mode
self.register_callback(f"{self._attr_unique_id}_623", 623)
self.register_callback(f"{callback_id}_623", 623)
async def async_will_remove_from_hass(self) -> None:
"""Entity removed."""
cmd_key = self.entity_description.cmd_key
cmd_id = self.entity_description.cmd_id
callback_id = f"{self.platform.domain}_{self._attr_unique_id}"
if cmd_key is not None:
self._host.async_unregister_update_cmd(cmd_key)
if cmd_id is not None:
self._host.api.baichuan.unregister_callback(self._attr_unique_id)
self._host.api.baichuan.unregister_callback(callback_id)
# Privacy mode
self._host.api.baichuan.unregister_callback(f"{self._attr_unique_id}_623")
self._host.api.baichuan.unregister_callback(f"{callback_id}_623")
await super().async_will_remove_from_hass()
@ -189,10 +191,10 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity):
"""Return True if entity is available."""
return super().available and self._host.api.camera_online(self._channel)
def register_callback(self, unique_id: str, cmd_id: int) -> None:
def register_callback(self, callback_id: str, cmd_id: int) -> None:
"""Register callback for TCP push events."""
self._host.api.baichuan.register_callback(
unique_id, self._push_callback, cmd_id, self._channel
callback_id, self._push_callback, cmd_id, self._channel
)
async def async_added_to_hass(self) -> None:

View File

@ -19,5 +19,5 @@
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"quality_scale": "platinum",
"requirements": ["reolink-aio==0.11.10"]
"requirements": ["reolink-aio==0.12.0"]
}

View File

@ -71,7 +71,7 @@ class ReolinkVODMediaSource(MediaSource):
host = get_host(self.hass, config_entry_id)
def get_vod_type() -> VodRequestType:
if filename.endswith(".mp4"):
if filename.endswith((".mp4", ".vref")):
if host.api.is_nvr:
return VodRequestType.DOWNLOAD
return VodRequestType.PLAYBACK

View File

@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/rympro",
"iot_class": "cloud_polling",
"requirements": ["pyrympro==0.0.8"]
"requirements": ["pyrympro==0.0.9"]
}

View File

@ -19,7 +19,7 @@
"delivered": {
"default": "mdi:package"
},
"returned": {
"alert": {
"default": "mdi:package"
},
"package": {

View File

@ -11,7 +11,7 @@ get_packages:
- "ready_to_be_picked_up"
- "undelivered"
- "delivered"
- "returned"
- "alert"
translation_key: package_state
config_entry_id:
required: true

View File

@ -57,8 +57,8 @@
"delivered": {
"name": "Delivered"
},
"returned": {
"name": "Returned"
"alert": {
"name": "Alert"
},
"package": {
"name": "Package {name}"
@ -104,7 +104,7 @@
"ready_to_be_picked_up": "[%key:component::seventeentrack::entity::sensor::ready_to_be_picked_up::name%]",
"undelivered": "[%key:component::seventeentrack::entity::sensor::undelivered::name%]",
"delivered": "[%key:component::seventeentrack::entity::sensor::delivered::name%]",
"returned": "[%key:component::seventeentrack::entity::sensor::returned::name%]"
"alert": "[%key:component::seventeentrack::entity::sensor::alert::name%]"
}
}
}

View File

@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pymodbus", "pysmarty2"],
"requirements": ["pysmarty2==0.10.1"]
"requirements": ["pysmarty2==0.10.2"]
}

View File

@ -170,6 +170,7 @@ MODELS_TV_ONLY = (
"BEAM",
"PLAYBAR",
"PLAYBASE",
"ULTRA",
)
MODELS_LINEIN_AND_TV = ("AMP",)

View File

@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/tesla_fleet",
"iot_class": "cloud_polling",
"loggers": ["tesla-fleet-api"],
"requirements": ["tesla-fleet-api==0.9.8"]
"requirements": ["tesla-fleet-api==0.9.10"]
}

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/teslemetry",
"iot_class": "cloud_polling",
"loggers": ["tesla-fleet-api"],
"requirements": ["tesla-fleet-api==0.9.8", "teslemetry-stream==0.6.6"]
"requirements": ["tesla-fleet-api==0.9.10", "teslemetry-stream==0.6.6"]
}

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/tessie",
"iot_class": "cloud_polling",
"loggers": ["tessie", "tesla-fleet-api"],
"requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.8"]
"requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.10"]
}

View File

@ -256,7 +256,7 @@ BINARY_SENSORS: dict[str, tuple[TuyaBinarySensorEntityDescription, ...]] = {
TuyaBinarySensorEntityDescription(
key=DPCode.WATERSENSOR_STATE,
device_class=BinarySensorDeviceClass.MOISTURE,
on_value="alarm",
on_value={"1", "alarm"},
),
TAMPER_BINARY_SENSOR,
),
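
The change widens on_value from a single string to a set; a minimal sketch of the membership check this implies, assuming on_value may be either form:

def is_on(dp_value: str, on_value: str | set[str]) -> bool:
    # Accept a single value or a set of values that mean "on".
    values = on_value if isinstance(on_value, set) else {on_value}
    return dp_value in values

assert is_on("alarm", {"1", "alarm"})
assert is_on("1", {"1", "alarm"})
assert not is_on("normal", {"1", "alarm"})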

View File

@ -12,5 +12,5 @@
"documentation": "https://www.home-assistant.io/integrations/vesync",
"iot_class": "cloud_polling",
"loggers": ["pyvesync"],
"requirements": ["pyvesync==2.1.17"]
"requirements": ["pyvesync==2.1.18"]
}

View File

@ -25,7 +25,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2025
MINOR_VERSION: Final = 2
PATCH_VERSION: Final = "4"
PATCH_VERSION: Final = "5"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 0)

View File

@ -37,7 +37,7 @@ habluetooth==3.21.1
hass-nabucasa==0.92.0
hassil==2.2.3
home-assistant-bluetooth==1.13.0
home-assistant-frontend==20250214.0
home-assistant-frontend==20250221.0
home-assistant-intents==2025.2.5
httpx==0.28.1
ifaddr==0.2.0

View File

@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2025.2.4"
version = "2025.2.5"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"

requirements_all.txt generated
View File

@ -428,7 +428,7 @@ aiowithings==3.1.5
aioymaps==1.2.5
# homeassistant.components.airgradient
airgradient==0.9.1
airgradient==0.9.2
# homeassistant.components.airly
airly==1.1.0
@ -747,7 +747,7 @@ debugpy==1.8.11
# decora==0.6
# homeassistant.components.ecovacs
deebot-client==12.1.0
deebot-client==12.2.0
# homeassistant.components.ihc
# homeassistant.components.namecheapdns
@ -1143,7 +1143,7 @@ hole==0.8.0
holidays==0.66
# homeassistant.components.frontend
home-assistant-frontend==20250214.0
home-assistant-frontend==20250221.0
# homeassistant.components.conversation
home-assistant-intents==2025.2.5
@ -1969,7 +1969,7 @@ pyforked-daapd==0.1.14
pyfreedompro==1.1.0
# homeassistant.components.fritzbox
pyfritzhome==0.6.14
pyfritzhome==0.6.15
# homeassistant.components.ifttt
pyfttt==0.3
@ -1990,7 +1990,7 @@ pyhaversion==22.8.0
pyheos==1.0.2
# homeassistant.components.hive
pyhive-integration==1.0.1
pyhive-integration==1.0.2
# homeassistant.components.homematic
pyhomematic==0.1.77
@ -2217,7 +2217,7 @@ pypoint==3.0.0
pyprof2calltree==1.4.5
# homeassistant.components.prosegur
pyprosegur==0.0.9
pyprosegur==0.0.13
# homeassistant.components.prusalink
pyprusalink==2.1.1
@ -2253,7 +2253,7 @@ pyrituals==0.0.6
pyroute2==0.7.5
# homeassistant.components.rympro
pyrympro==0.0.8
pyrympro==0.0.9
# homeassistant.components.sabnzbd
pysabnzbd==1.1.1
@ -2304,7 +2304,7 @@ pysmartapp==0.3.5
pysmartthings==0.7.8
# homeassistant.components.smarty
pysmarty2==0.10.1
pysmarty2==0.10.2
# homeassistant.components.edl21
pysml==0.0.12
@ -2513,7 +2513,7 @@ pyvera==0.3.15
pyversasense==0.0.6
# homeassistant.components.vesync
pyvesync==2.1.17
pyvesync==2.1.18
# homeassistant.components.vizio
pyvizio==0.1.61
@ -2603,7 +2603,7 @@ renault-api==0.2.9
renson-endura-delta==1.7.2
# homeassistant.components.reolink
reolink-aio==0.11.10
reolink-aio==0.12.0
# homeassistant.components.idteck_prox
rfk101py==0.0.1
@ -2854,7 +2854,7 @@ temperusb==1.6.1
# homeassistant.components.tesla_fleet
# homeassistant.components.teslemetry
# homeassistant.components.tessie
tesla-fleet-api==0.9.8
tesla-fleet-api==0.9.10
# homeassistant.components.powerwall
tesla-powerwall==0.5.2

View File

@ -410,7 +410,7 @@ aiowithings==3.1.5
aioymaps==1.2.5
# homeassistant.components.airgradient
airgradient==0.9.1
airgradient==0.9.2
# homeassistant.components.airly
airly==1.1.0
@ -637,7 +637,7 @@ dbus-fast==2.33.0
debugpy==1.8.11
# homeassistant.components.ecovacs
deebot-client==12.1.0
deebot-client==12.2.0
# homeassistant.components.ihc
# homeassistant.components.namecheapdns
@ -972,7 +972,7 @@ hole==0.8.0
holidays==0.66
# homeassistant.components.frontend
home-assistant-frontend==20250214.0
home-assistant-frontend==20250221.0
# homeassistant.components.conversation
home-assistant-intents==2025.2.5
@ -1604,7 +1604,7 @@ pyforked-daapd==0.1.14
pyfreedompro==1.1.0
# homeassistant.components.fritzbox
pyfritzhome==0.6.14
pyfritzhome==0.6.15
# homeassistant.components.ifttt
pyfttt==0.3
@ -1619,7 +1619,7 @@ pyhaversion==22.8.0
pyheos==1.0.2
# homeassistant.components.hive
pyhive-integration==1.0.1
pyhive-integration==1.0.2
# homeassistant.components.homematic
pyhomematic==0.1.77
@ -1810,7 +1810,7 @@ pypoint==3.0.0
pyprof2calltree==1.4.5
# homeassistant.components.prosegur
pyprosegur==0.0.9
pyprosegur==0.0.13
# homeassistant.components.prusalink
pyprusalink==2.1.1
@ -1837,7 +1837,7 @@ pyrituals==0.0.6
pyroute2==0.7.5
# homeassistant.components.rympro
pyrympro==0.0.8
pyrympro==0.0.9
# homeassistant.components.sabnzbd
pysabnzbd==1.1.1
@ -1876,7 +1876,7 @@ pysmartapp==0.3.5
pysmartthings==0.7.8
# homeassistant.components.smarty
pysmarty2==0.10.1
pysmarty2==0.10.2
# homeassistant.components.edl21
pysml==0.0.12
@ -2031,7 +2031,7 @@ pyuptimerobot==22.2.0
pyvera==0.3.15
# homeassistant.components.vesync
pyvesync==2.1.17
pyvesync==2.1.18
# homeassistant.components.vizio
pyvizio==0.1.61
@ -2106,7 +2106,7 @@ renault-api==0.2.9
renson-endura-delta==1.7.2
# homeassistant.components.reolink
reolink-aio==0.11.10
reolink-aio==0.12.0
# homeassistant.components.rflink
rflink==0.0.66
@ -2294,7 +2294,7 @@ temperusb==1.6.1
# homeassistant.components.tesla_fleet
# homeassistant.components.teslemetry
# homeassistant.components.tessie
tesla-fleet-api==0.9.8
tesla-fleet-api==0.9.10
# homeassistant.components.powerwall
tesla-powerwall==0.5.2

View File

@ -1821,23 +1821,6 @@ async def snapshot_platform(
assert state == snapshot(name=f"{entity_entry.entity_id}-state")
def reset_translation_cache(hass: HomeAssistant, components: list[str]) -> None:
"""Reset translation cache for specified components.
Use this if you are mocking a core component (for example via
mock_integration), to ensure that the mocked translations are not
persisted in the shared session cache.
"""
translations_cache = translation._async_get_translations_cache(hass)
for loaded_components in translations_cache.cache_data.loaded.values():
for component_to_unload in components:
loaded_components.discard(component_to_unload)
for loaded_categories in translations_cache.cache_data.cache.values():
for loaded_components in loaded_categories.values():
for component_to_unload in components:
loaded_components.pop(component_to_unload, None)
@lru_cache
def get_quality_scale(integration: str) -> dict[str, QualityScaleStatus]:
"""Load quality scale for integration."""

View File

@ -25,13 +25,13 @@
'nitrogen_index': 1,
'pm003_count': 270,
'pm01': 22,
'pm02': 34,
'pm02': 34.0,
'pm10': 41,
'raw_ambient_temperature': 27.96,
'raw_nitrogen': 16931,
'raw_nitrogen': 16931.0,
'raw_pm02': 34,
'raw_relative_humidity': 48.0,
'raw_total_volatile_organic_component': 31792,
'raw_total_volatile_organic_component': 31792.0,
'rco2': 778,
'relative_humidity': 47.0,
'serial_number': '84fce612f5b8',

View File

@ -710,7 +710,7 @@
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '34',
'state': '34.0',
})
# ---
# name: test_all_entities[indoor][sensor.airgradient_raw_nox-entry]
@ -760,7 +760,7 @@
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '16931',
'state': '16931.0',
})
# ---
# name: test_all_entities[indoor][sensor.airgradient_raw_pm2_5-entry]
@ -861,7 +861,7 @@
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '31792',
'state': '31792.0',
})
# ---
# name: test_all_entities[indoor][sensor.airgradient_signal_strength-entry]
@ -1255,7 +1255,7 @@
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '16359',
'state': '16359.0',
})
# ---
# name: test_all_entities[outdoor][sensor.airgradient_raw_voc-entry]
@ -1305,7 +1305,7 @@
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '30802',
'state': '30802.0',
})
# ---
# name: test_all_entities[outdoor][sensor.airgradient_signal_strength-entry]

View File

@ -39,7 +39,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -84,11 +84,100 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
# name: test_store_migration[store_data1]
dict({
'data': dict({
'backups': list([
dict({
'backup_id': 'abc123',
'failed_agent_ids': list([
'test.remote',
]),
}),
]),
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'state': 'never',
'time': None,
}),
}),
}),
'key': 'backup',
'minor_version': 4,
'version': 1,
})
# ---
# name: test_store_migration[store_data1].1
dict({
'data': dict({
'backups': list([
dict({
'backup_id': 'abc123',
'failed_agent_ids': list([
'test.remote',
]),
}),
]),
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'test-agent',
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'state': 'never',
'time': None,
}),
}),
}),
'key': 'backup',
'minor_version': 4,
'version': 1,
})
# ---
# name: test_store_migration[store_data2]
dict({
'data': dict({
'backups': list([
@ -131,11 +220,11 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
# name: test_store_migration[store_data1].1
# name: test_store_migration[store_data2].1
dict({
'data': dict({
'backups': list([
@ -179,7 +268,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---

View File

@ -251,7 +251,7 @@
'type': 'result',
})
# ---
# name: test_config_info[storage_data0]
# name: test_config_load_config_info[with_hassio-storage_data0]
dict({
'id': 1,
'result': dict({
@ -288,7 +288,7 @@
'type': 'result',
})
# ---
# name: test_config_info[storage_data1]
# name: test_config_load_config_info[with_hassio-storage_data1]
dict({
'id': 1,
'result': dict({
@ -337,7 +337,7 @@
'type': 'result',
})
# ---
# name: test_config_info[storage_data2]
# name: test_config_load_config_info[with_hassio-storage_data2]
dict({
'id': 1,
'result': dict({
@ -375,7 +375,7 @@
'type': 'result',
})
# ---
# name: test_config_info[storage_data3]
# name: test_config_load_config_info[with_hassio-storage_data3]
dict({
'id': 1,
'result': dict({
@ -413,7 +413,7 @@
'type': 'result',
})
# ---
# name: test_config_info[storage_data4]
# name: test_config_load_config_info[with_hassio-storage_data4]
dict({
'id': 1,
'result': dict({
@ -452,7 +452,7 @@
'type': 'result',
})
# ---
# name: test_config_info[storage_data5]
# name: test_config_load_config_info[with_hassio-storage_data5]
dict({
'id': 1,
'result': dict({
@ -490,7 +490,7 @@
'type': 'result',
})
# ---
# name: test_config_info[storage_data6]
# name: test_config_load_config_info[with_hassio-storage_data6]
dict({
'id': 1,
'result': dict({
@ -530,7 +530,7 @@
'type': 'result',
})
# ---
# name: test_config_info[storage_data7]
# name: test_config_load_config_info[with_hassio-storage_data7]
dict({
'id': 1,
'result': dict({
@ -576,6 +576,484 @@
'type': 'result',
})
# ---
# name: test_config_load_config_info[with_hassio-storage_data8]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'hassio.local',
'hassio.share',
'test-agent',
]),
'include_addons': None,
'include_all_addons': False,
'include_database': False,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_load_config_info[with_hassio-storage_data9]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'hassio.local',
'test-agent',
]),
'include_addons': None,
'include_all_addons': False,
'include_database': False,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_load_config_info[without_hassio-storage_data0]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_load_config_info[without_hassio-storage_data1]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'test-agent',
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': 'test-name',
'password': 'test-password',
}),
'last_attempted_automatic_backup': '2024-10-26T04:45:00+01:00',
'last_completed_automatic_backup': '2024-10-26T04:45:00+01:00',
'next_automatic_backup': '2024-11-14T04:55:00+01:00',
'next_automatic_backup_additional': False,
'retention': dict({
'copies': 3,
'days': 7,
}),
'schedule': dict({
'days': list([
'mon',
'tue',
'wed',
'thu',
'fri',
'sat',
'sun',
]),
'recurrence': 'custom_days',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_load_config_info[without_hassio-storage_data2]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'test-agent',
]),
'include_addons': None,
'include_all_addons': False,
'include_database': False,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': 3,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_load_config_info[without_hassio-storage_data3]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'test-agent',
]),
'include_addons': None,
'include_all_addons': False,
'include_database': False,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': '2024-10-27T04:45:00+01:00',
'last_completed_automatic_backup': '2024-10-26T04:45:00+01:00',
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': 7,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_load_config_info[without_hassio-storage_data4]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'test-agent',
]),
'include_addons': None,
'include_all_addons': False,
'include_database': False,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': '2024-11-18T04:55:00+01:00',
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
'mon',
]),
'recurrence': 'custom_days',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_load_config_info[without_hassio-storage_data5]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'test-agent',
]),
'include_addons': None,
'include_all_addons': False,
'include_database': False,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_load_config_info[without_hassio-storage_data6]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'test-agent',
]),
'include_addons': None,
'include_all_addons': False,
'include_database': False,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': '2024-11-17T04:55:00+01:00',
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
'mon',
'sun',
]),
'recurrence': 'custom_days',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_load_config_info[without_hassio-storage_data7]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
'test-agent1': dict({
'protected': True,
}),
'test-agent2': dict({
'protected': False,
}),
}),
'create_backup': dict({
'agent_ids': list([
'test-agent',
]),
'include_addons': None,
'include_all_addons': False,
'include_database': False,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': '2024-11-17T04:55:00+01:00',
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
'mon',
'sun',
]),
'recurrence': 'custom_days',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_load_config_info[without_hassio-storage_data8]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'backup.local',
'test-agent',
]),
'include_addons': None,
'include_all_addons': False,
'include_database': False,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_load_config_info[without_hassio-storage_data9]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'backup.local',
'test-agent',
]),
'include_addons': None,
'include_all_addons': False,
'include_database': False,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_update[commands0]
dict({
'id': 1,
@ -686,7 +1164,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -800,7 +1278,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -914,7 +1392,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1038,7 +1516,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1205,7 +1683,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1319,7 +1797,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1435,7 +1913,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1549,7 +2027,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1667,7 +2145,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1789,7 +2267,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1903,7 +2381,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -2017,7 +2495,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -2131,7 +2609,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -2245,7 +2723,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -2323,6 +2801,154 @@
'type': 'result',
})
# ---
# name: test_config_update_errors[command10]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_update_errors[command10].1
dict({
'id': 3,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_update_errors[command11]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_update_errors[command11].1
dict({
'id': 3,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_update_errors[command1]
dict({
'id': 1,

View File

@ -57,6 +57,38 @@ def mock_delay_save() -> Generator[None]:
"key": DOMAIN,
"version": 1,
},
{
"data": {
"backups": [
{
"backup_id": "abc123",
"failed_agent_ids": ["test.remote"],
}
],
"config": {
"create_backup": {
"agent_ids": [],
"include_addons": None,
"include_all_addons": False,
"include_database": True,
"include_folders": None,
"name": None,
"password": None,
},
"last_attempted_automatic_backup": None,
"last_completed_automatic_backup": None,
"retention": {
"copies": 0,
"days": 0,
},
"schedule": {
"state": "never",
},
},
},
"key": DOMAIN,
"version": 1,
},
{
"data": {
"backups": [

View File

@ -89,6 +89,28 @@ from tests.common import get_fixture_path
size=1234,
),
),
# Check the backup_request_date is used as date if present
(
b'{"compressed":true,"date":"2024-12-01T00:00:00.000000-00:00","homeassistant":'
b'{"exclude_database":true,"version":"2024.12.0.dev0"},"name":"test",'
b'"extra":{"supervisor.backup_request_date":"2025-12-01T00:00:00.000000-00:00"},'
b'"protected":true,"slug":"455645fe","type":"partial","version":2}',
AgentBackup(
addons=[],
backup_id="455645fe",
date="2025-12-01T00:00:00.000000-00:00",
database_included=False,
extra_metadata={
"supervisor.backup_request_date": "2025-12-01T00:00:00.000000-00:00"
},
folders=[],
homeassistant_included=True,
homeassistant_version="2024.12.0.dev0",
name="test",
protected=True,
size=1234,
),
),
],
)
def test_read_backup(backup_json_content: bytes, expected_backup: AgentBackup) -> None:

View File

@ -46,10 +46,10 @@ BACKUP_CALL = call(
agent_ids=["test.test-agent"],
backup_name="test-name",
extra_metadata={"instance_id": ANY, "with_automatic_settings": True},
include_addons=["test-addon"],
include_addons=[],
include_all_addons=False,
include_database=True,
include_folders=["media"],
include_folders=None,
include_homeassistant=True,
password="test-password",
on_progress=ANY,
@ -1126,25 +1126,96 @@ async def test_agents_info(
"minor_version": store.STORAGE_VERSION_MINOR,
},
},
{
"backup": {
"data": {
"backups": [],
"config": {
"agents": {},
"create_backup": {
"agent_ids": ["hassio.local", "hassio.share", "test-agent"],
"include_addons": None,
"include_all_addons": False,
"include_database": False,
"include_folders": None,
"name": None,
"password": None,
},
"retention": {"copies": None, "days": None},
"last_attempted_automatic_backup": None,
"last_completed_automatic_backup": None,
"schedule": {
"days": [],
"recurrence": "never",
"state": "never",
"time": None,
},
},
},
"key": DOMAIN,
"version": store.STORAGE_VERSION,
"minor_version": store.STORAGE_VERSION_MINOR,
},
},
{
"backup": {
"data": {
"backups": [],
"config": {
"agents": {},
"create_backup": {
"agent_ids": ["backup.local", "test-agent"],
"include_addons": None,
"include_all_addons": False,
"include_database": False,
"include_folders": None,
"name": None,
"password": None,
},
"retention": {"copies": None, "days": None},
"last_attempted_automatic_backup": None,
"last_completed_automatic_backup": None,
"schedule": {
"days": [],
"recurrence": "never",
"state": "never",
"time": None,
},
},
},
"key": DOMAIN,
"version": store.STORAGE_VERSION,
"minor_version": store.STORAGE_VERSION_MINOR,
},
},
],
)
@pytest.mark.parametrize(
("with_hassio"),
[
pytest.param(True, id="with_hassio"),
pytest.param(False, id="without_hassio"),
],
)
@pytest.mark.usefixtures("supervisor_client")
@patch("homeassistant.components.backup.config.random.randint", Mock(return_value=600))
async def test_config_info(
async def test_config_load_config_info(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
freezer: FrozenDateTimeFactory,
snapshot: SnapshotAssertion,
hass_storage: dict[str, Any],
with_hassio: bool,
storage_data: dict[str, Any] | None,
) -> None:
"""Test getting backup config info."""
"""Test loading stored backup config and reading it via config/info."""
client = await hass_ws_client(hass)
await hass.config.async_set_time_zone("Europe/Amsterdam")
freezer.move_to("2024-11-13T12:01:00+01:00")
hass_storage.update(storage_data)
await setup_backup_integration(hass)
await setup_backup_integration(hass, with_hassio=with_hassio)
await hass.async_block_till_done()
await client.send_json_auto_id({"type": "backup/config/info"})
@ -1366,6 +1437,14 @@ async def test_config_update(
"type": "backup/config/update",
"agents": {"test-agent1": {"favorite": True}},
},
{
"type": "backup/config/update",
"retention": {"copies": 0},
},
{
"type": "backup/config/update",
"retention": {"days": 0},
},
],
)
async def test_config_update_errors(
@ -1702,10 +1781,10 @@ async def test_config_schedule_logic(
"agents": {},
"create_backup": {
"agent_ids": ["test.test-agent"],
"include_addons": ["test-addon"],
"include_addons": [],
"include_all_addons": False,
"include_database": True,
"include_folders": ["media"],
"include_folders": [],
"name": "test-name",
"password": "test-password",
},
@ -2163,7 +2242,7 @@ async def test_config_schedule_logic(
{
"type": "backup/config/update",
"create_backup": {"agent_ids": ["test.test-agent"]},
"retention": {"copies": 0, "days": None},
"retention": {"copies": 1, "days": None},
"schedule": {"recurrence": "daily"},
},
{
@ -2237,7 +2316,7 @@ async def test_config_schedule_logic(
{
"type": "backup/config/update",
"create_backup": {"agent_ids": ["test.test-agent"]},
"retention": {"copies": 0, "days": None},
"retention": {"copies": 1, "days": None},
"schedule": {"recurrence": "daily"},
},
{
@ -2306,7 +2385,7 @@ async def test_config_schedule_logic(
{
"type": "backup/config/update",
"create_backup": {"agent_ids": ["test.test-agent"]},
"retention": {"copies": 0, "days": None},
"retention": {"copies": 1, "days": None},
"schedule": {"recurrence": "daily"},
},
{
@ -3027,7 +3106,7 @@ async def test_config_retention_copies_logic_manual_backup(
{
"type": "backup/config/update",
"create_backup": {"agent_ids": ["test-agent"]},
"retention": {"copies": None, "days": 0},
"retention": {"copies": None, "days": 1},
"schedule": {"recurrence": "never"},
}
],

View File

@ -3,12 +3,12 @@
from collections.abc import AsyncGenerator, Generator
from io import StringIO
from typing import Any
from unittest.mock import Mock, PropertyMock, patch
from unittest.mock import ANY, Mock, PropertyMock, patch
from aiohttp import ClientError
from hass_nabucasa import CloudError
from hass_nabucasa.api import CloudApiNonRetryableError
from hass_nabucasa.files import FilesError
from hass_nabucasa.files import FilesError, StorageType
import pytest
from homeassistant.components.backup import (
@ -90,7 +90,26 @@ def mock_list_files() -> Generator[MagicMock]:
"size": 34519040,
"storage-type": "backup",
},
}
},
{
"Key": "462e16810d6841228828d9dd2f9e341f.tar",
"LastModified": "2024-11-22T10:49:01.182Z",
"Size": 34519040,
"Metadata": {
"addons": [],
"backup_id": "23e64aed",
"date": "2024-11-22T11:48:48.727189+01:00",
"database_included": True,
"extra_metadata": {},
"folders": [],
"homeassistant_included": True,
"homeassistant_version": "2024.12.0.dev0",
"name": "Core 2024.12.0.dev0",
"protected": False,
"size": 34519040,
"storage-type": "backup",
},
},
]
yield list_files
@ -148,7 +167,21 @@ async def test_agents_list_backups(
"name": "Core 2024.12.0.dev0",
"failed_agent_ids": [],
"with_automatic_settings": None,
}
},
{
"addons": [],
"agents": {"cloud.cloud": {"protected": False, "size": 34519040}},
"backup_id": "23e64aed",
"date": "2024-11-22T11:48:48.727189+01:00",
"database_included": True,
"extra_metadata": {},
"folders": [],
"homeassistant_included": True,
"homeassistant_version": "2024.12.0.dev0",
"name": "Core 2024.12.0.dev0",
"failed_agent_ids": [],
"with_automatic_settings": None,
},
]
@ -242,6 +275,10 @@ async def test_agents_download(
resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud")
assert resp.status == 200
assert await resp.content.read() == b"backup data"
cloud.files.download.assert_called_once_with(
filename="462e16810d6841228828d9dd2f9e341e.tar",
storage_type=StorageType.BACKUP,
)
@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
@ -317,7 +354,14 @@ async def test_agents_upload(
data={"file": StringIO(backup_data)},
)
assert len(cloud.files.upload.mock_calls) == 1
cloud.files.upload.assert_called_once_with(
storage_type=StorageType.BACKUP,
open_stream=ANY,
filename=f"{cloud.client.prefs.instance_id}.tar",
base64md5hash=ANY,
metadata=ANY,
size=ANY,
)
metadata = cloud.files.upload.mock_calls[-1].kwargs["metadata"]
assert metadata["backup_id"] == backup_id
@ -552,6 +596,7 @@ async def test_agents_upload_wrong_size(
async def test_agents_delete(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
cloud: Mock,
mock_delete_file: Mock,
) -> None:
"""Test agent delete backup."""
@ -568,7 +613,11 @@ async def test_agents_delete(
assert response["success"]
assert response["result"] == {"agent_errors": {}}
mock_delete_file.assert_called_once()
mock_delete_file.assert_called_once_with(
cloud,
filename="462e16810d6841228828d9dd2f9e341e.tar",
storage_type=StorageType.BACKUP,
)
@pytest.mark.parametrize("side_effect", [ClientError, CloudError])

View File

@ -529,6 +529,7 @@ def resolution_suggestions_for_issue_fixture(supervisor_client: AsyncMock) -> As
def supervisor_client() -> Generator[AsyncMock]:
"""Mock the supervisor client."""
mounts_info_mock = AsyncMock(spec_set=["default_backup_mount", "mounts"])
mounts_info_mock.default_backup_mount = None
mounts_info_mock.mounts = []
supervisor_client = AsyncMock()
supervisor_client.addons = AsyncMock()

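For context: a mock created with spec_set only accepts the attributes it was given, so the fixture must assign default_backup_mount and mounts explicitly before tests read them. A standalone sketch of that behavior (the extra attribute name is illustrative):

# Sketch: spec_set restricts which attributes a mock accepts.
from unittest.mock import AsyncMock

mounts_info = AsyncMock(spec_set=["default_backup_mount", "mounts"])
mounts_info.default_backup_mount = None  # allowed: listed in spec_set
mounts_info.mounts = []                  # allowed: listed in spec_set
try:
    mounts_info.other = 1                # not in spec_set
except AttributeError:
    pass                                 # raises AttributeError as expected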
View File

@ -23,7 +23,12 @@ from homeassistant.components.climate import (
SERVICE_SET_TEMPERATURE,
HVACMode,
)
from homeassistant.components.fritzbox.climate import PRESET_HOLIDAY, PRESET_SUMMER
from homeassistant.components.fritzbox.climate import (
OFF_API_TEMPERATURE,
ON_API_TEMPERATURE,
PRESET_HOLIDAY,
PRESET_SUMMER,
)
from homeassistant.components.fritzbox.const import (
ATTR_STATE_BATTERY_LOW,
ATTR_STATE_HOLIDAY_MODE,
@ -367,9 +372,23 @@ async def test_set_hvac_mode(
assert device.set_target_temperature.call_args_list == expected_call_args
async def test_set_preset_mode_comfort(hass: HomeAssistant, fritz: Mock) -> None:
@pytest.mark.parametrize(
("comfort_temperature", "expected_call_args"),
[
(20, [call(20, True)]),
(28, [call(28, True)]),
(ON_API_TEMPERATURE, [call(30, True)]),
],
)
async def test_set_preset_mode_comfort(
hass: HomeAssistant,
fritz: Mock,
comfort_temperature: int,
expected_call_args: list[_Call],
) -> None:
"""Test setting preset mode."""
device = FritzDeviceClimateMock()
device.comfort_temperature = comfort_temperature
assert await setup_config_entry(
hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
)
@ -380,12 +399,27 @@ async def test_set_preset_mode_comfort(hass: HomeAssistant, fritz: Mock) -> None
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_COMFORT},
True,
)
assert device.set_target_temperature.call_args_list == [call(22, True)]
assert device.set_target_temperature.call_count == len(expected_call_args)
assert device.set_target_temperature.call_args_list == expected_call_args
async def test_set_preset_mode_eco(hass: HomeAssistant, fritz: Mock) -> None:
@pytest.mark.parametrize(
("eco_temperature", "expected_call_args"),
[
(20, [call(20, True)]),
(16, [call(16, True)]),
(OFF_API_TEMPERATURE, [call(0, True)]),
],
)
async def test_set_preset_mode_eco(
hass: HomeAssistant,
fritz: Mock,
eco_temperature: int,
expected_call_args: list[_Call],
) -> None:
"""Test setting preset mode."""
device = FritzDeviceClimateMock()
device.eco_temperature = eco_temperature
assert await setup_config_entry(
hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
)
@ -396,7 +430,8 @@ async def test_set_preset_mode_eco(hass: HomeAssistant, fritz: Mock) -> None:
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_ECO},
True,
)
assert device.set_target_temperature.call_args_list == [call(16, True)]
assert device.set_target_temperature.call_count == len(expected_call_args)
assert device.set_target_temperature.call_args_list == expected_call_args
async def test_preset_mode_update(hass: HomeAssistant, fritz: Mock) -> None:

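For context: the parametrized fritzbox tests compare the mock's recorded calls against expected unittest.mock.call objects. A minimal sketch of that comparison pattern (names are illustrative):

# Sketch: call objects compare structurally against call_args_list.
from unittest.mock import Mock, call

device = Mock()
device.set_target_temperature(22, True)

expected = [call(22, True)]
assert device.set_target_temperature.call_count == len(expected)
assert device.set_target_temperature.call_args_list == expected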
View File

@ -0,0 +1,130 @@
# serializer version: 1
# name: test_config_load_config_info[storage_data0]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_load_config_info[storage_data1]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'test-agent1',
'hassio.local',
'test-agent2',
]),
'include_addons': list([
'addon1',
'addon2',
]),
'include_all_addons': True,
'include_database': True,
'include_folders': list([
'media',
'share',
]),
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_load_config_info[storage_data2]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'test-agent1',
'hassio.local',
'test-agent2',
]),
'include_addons': list([
'addon1',
'addon2',
]),
'include_all_addons': False,
'include_database': True,
'include_folders': list([
'media',
'share',
]),
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---

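For context: these .ambr entries are serialized snapshots managed by syrupy; a test asserts against the snapshot fixture, and the entry named after the test (including its parametrization id) is compared, regenerable with pytest --snapshot-update. A minimal sketch with an illustrative payload:

# Sketch: a syrupy snapshot assertion compares the object against the
# module's .ambr entry named after the test.
from syrupy import SnapshotAssertion


def test_config_load_config_info(snapshot: SnapshotAssertion) -> None:
    response = {"id": 1, "success": True, "type": "result"}  # illustrative
    assert response == snapshot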
View File

@ -30,6 +30,7 @@ from aiohasupervisor.models.backups import LOCATION_CLOUD_BACKUP, LOCATION_LOCAL
from aiohasupervisor.models.mounts import MountsInfo
from freezegun.api import FrozenDateTimeFactory
import pytest
from syrupy import SnapshotAssertion
from homeassistant.components.backup import (
DOMAIN as BACKUP_DOMAIN,
@ -38,6 +39,7 @@ from homeassistant.components.backup import (
BackupAgent,
BackupAgentPlatformProtocol,
Folder,
store as backup_store,
)
from homeassistant.components.hassio import DOMAIN
from homeassistant.components.hassio.backup import RESTORE_JOB_ID_ENV
@ -2466,3 +2468,94 @@ async def test_restore_progress_after_restart_unknown_job(
assert response["success"]
assert response["result"]["last_non_idle_event"] is None
assert response["result"]["state"] == "idle"
@pytest.mark.parametrize(
"storage_data",
[
{},
{
"backup": {
"data": {
"backups": [],
"config": {
"agents": {},
"create_backup": {
"agent_ids": ["test-agent1", "hassio.local", "test-agent2"],
"include_addons": ["addon1", "addon2"],
"include_all_addons": True,
"include_database": True,
"include_folders": ["media", "share"],
"name": None,
"password": None,
},
"retention": {"copies": None, "days": None},
"last_attempted_automatic_backup": None,
"last_completed_automatic_backup": None,
"schedule": {
"days": [],
"recurrence": "never",
"state": "never",
"time": None,
},
},
},
"key": DOMAIN,
"version": backup_store.STORAGE_VERSION,
"minor_version": backup_store.STORAGE_VERSION_MINOR,
},
},
{
"backup": {
"data": {
"backups": [],
"config": {
"agents": {},
"create_backup": {
"agent_ids": ["test-agent1", "backup.local", "test-agent2"],
"include_addons": ["addon1", "addon2"],
"include_all_addons": False,
"include_database": True,
"include_folders": ["media", "share"],
"name": None,
"password": None,
},
"retention": {"copies": None, "days": None},
"last_attempted_automatic_backup": None,
"last_completed_automatic_backup": None,
"schedule": {
"days": [],
"recurrence": "never",
"state": "never",
"time": None,
},
},
},
"key": DOMAIN,
"version": backup_store.STORAGE_VERSION,
"minor_version": backup_store.STORAGE_VERSION_MINOR,
},
},
],
)
@pytest.mark.usefixtures("hassio_client")
async def test_config_load_config_info(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
freezer: FrozenDateTimeFactory,
snapshot: SnapshotAssertion,
hass_storage: dict[str, Any],
storage_data: dict[str, Any] | None,
) -> None:
"""Test loading stored backup config and reading it via config/info."""
client = await hass_ws_client(hass)
await hass.config.async_set_time_zone("Europe/Amsterdam")
freezer.move_to("2024-11-13T12:01:00+01:00")
hass_storage.update(storage_data)
assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}})
await hass.async_block_till_done()
await client.send_json_auto_id({"type": "backup/config/info"})
assert await client.receive_json() == snapshot

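For context: hass_storage pre-seeds the contents that homeassistant.helpers.storage.Store would otherwise read from .storage on disk, keyed by store name. A hedged sketch of the shape used above (values are illustrative):

# Sketch: each hass_storage entry mirrors an on-disk .storage document.
storage_data = {
    "backup": {
        "key": "backup",
        "version": 1,        # the tests above use backup_store.STORAGE_VERSION
        "minor_version": 1,  # and backup_store.STORAGE_VERSION_MINOR
        "data": {"backups": [], "config": {}},
    }
}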
View File

@ -539,6 +539,70 @@ async def test_list_exposed_entities(
}
async def test_list_exposed_entities_with_filter(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
hass_ws_client: WebSocketGenerator,
) -> None:
"""Test list exposed entities with filter."""
ws_client = await hass_ws_client(hass)
assert await async_setup_component(hass, "homeassistant", {})
await hass.async_block_till_done()
entry1 = entity_registry.async_get_or_create("test", "test", "unique1")
entry2 = entity_registry.async_get_or_create("test", "test", "unique2")
# Expose 1 to Alexa
await ws_client.send_json_auto_id(
{
"type": "homeassistant/expose_entity",
"assistants": ["cloud.alexa"],
"entity_ids": [entry1.entity_id],
"should_expose": True,
}
)
response = await ws_client.receive_json()
assert response["success"]
# Expose 2 to Google
await ws_client.send_json_auto_id(
{
"type": "homeassistant/expose_entity",
"assistants": ["cloud.google_assistant"],
"entity_ids": [entry2.entity_id],
"should_expose": True,
}
)
response = await ws_client.receive_json()
assert response["success"]
# List with filter
await ws_client.send_json_auto_id(
{"type": "homeassistant/expose_entity/list", "assistant": "cloud.alexa"}
)
response = await ws_client.receive_json()
assert response["success"]
assert response["result"] == {
"exposed_entities": {
"test.test_unique1": {"cloud.alexa": True},
},
}
await ws_client.send_json_auto_id(
{
"type": "homeassistant/expose_entity/list",
"assistant": "cloud.google_assistant",
}
)
response = await ws_client.receive_json()
assert response["success"]
assert response["result"] == {
"exposed_entities": {
"test.test_unique2": {"cloud.google_assistant": True},
},
}
async def test_listeners(
hass: HomeAssistant, entity_registry: er.EntityRegistry
) -> None:

View File

@ -580,13 +580,19 @@ def alarm_clock_fixture_extended():
return alarm_clock
@pytest.fixture(name="speaker_model")
def speaker_model_fixture(request: pytest.FixtureRequest):
"""Create fixture for the speaker model."""
return getattr(request, "param", "Model Name")
@pytest.fixture(name="speaker_info")
def speaker_info_fixture():
def speaker_info_fixture(speaker_model):
"""Create speaker_info fixture."""
return {
"zone_name": "Zone A",
"uid": "RINCON_test",
"model_name": "Model Name",
"model_name": speaker_model,
"model_number": "S12",
"hardware_version": "1.20.1.6-1.1",
"software_version": "49.2-64250",

View File

@ -10,6 +10,7 @@ from syrupy import SnapshotAssertion
from homeassistant.components.media_player import (
ATTR_INPUT_SOURCE,
ATTR_INPUT_SOURCE_LIST,
ATTR_MEDIA_ANNOUNCE,
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
@ -1205,3 +1206,27 @@ async def test_media_get_queue(
)
soco_mock.get_queue.assert_called_with(max_items=0)
assert result == snapshot
@pytest.mark.parametrize(
("speaker_model", "source_list"),
[
("Sonos Arc Ultra", [SOURCE_TV]),
("Sonos Arc", [SOURCE_TV]),
("Sonos Playbar", [SOURCE_TV]),
("Sonos Connect", [SOURCE_LINEIN]),
("Sonos Play:5", [SOURCE_LINEIN]),
("Sonos Amp", [SOURCE_LINEIN, SOURCE_TV]),
("Sonos Era", None),
],
indirect=["speaker_model"],
)
async def test_media_source_list(
hass: HomeAssistant,
async_autosetup_sonos,
speaker_model: str,
source_list: list[str] | None,
) -> None:
"""Test the mapping between the speaker model name and source_list."""
state = hass.states.get("media_player.zone_a")
assert state.attributes.get(ATTR_INPUT_SOURCE_LIST) == source_list

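For context: indirect=["speaker_model"] routes that parameter through the speaker_model fixture (via request.param) instead of passing it straight to the test, which is how the parametrized model name reaches the speaker_info fixture above. A minimal standalone sketch:

# Sketch: indirect parametrization delivers the value to the fixture.
import pytest


@pytest.fixture(name="speaker_model")
def speaker_model_fixture(request: pytest.FixtureRequest):
    return getattr(request, "param", "Model Name")


@pytest.mark.parametrize(
    ("speaker_model", "expected"),
    [("Sonos Arc", "Sonos Arc"), ("Sonos Amp", "Sonos Amp")],
    indirect=["speaker_model"],
)
def test_speaker_model(speaker_model: str, expected: str) -> None:
    assert speaker_model == expected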
View File

@ -34,7 +34,6 @@ from tests.common import (
mock_integration,
mock_platform,
mock_restore_cache,
reset_translation_cache,
)
from tests.typing import ClientSessionGenerator, WebSocketGenerator
@ -519,9 +518,6 @@ async def test_default_engine_prefer_cloud_entity(
assert provider_engine.name == "test"
assert async_default_engine(hass) == "stt.cloud_stt_entity"
# Reset the `cloud` translations cache to avoid flaky translation checks
reset_translation_cache(hass, ["cloud"])
async def test_get_engine_legacy(
hass: HomeAssistant, tmp_path: Path, mock_provider: MockSTTProvider

View File

@ -44,7 +44,6 @@ from tests.common import (
mock_integration,
mock_platform,
mock_restore_cache,
reset_translation_cache,
)
from tests.typing import ClientSessionGenerator, WebSocketGenerator
@ -1987,6 +1986,3 @@ async def test_default_engine_prefer_cloud_entity(
provider_engine = tts.async_resolve_engine(hass, "test")
assert provider_engine == "test"
assert tts.async_default_engine(hass) == "tts.cloud_tts_entity"
# Reset the `cloud` translations cache to avoid flaky translation checks
reset_translation_cache(hass, ["cloud"])

View File

@ -171,6 +171,7 @@
'models': list([
'LV-PUR131S',
'LV-RH131S',
'LV-RH131S-WM',
]),
'modes': list([
'manual',

View File

@ -11,6 +11,7 @@ import gc
import itertools
import logging
import os
import pathlib
import reprlib
from shutil import rmtree
import sqlite3
@ -49,7 +50,7 @@ from . import patch_recorder
# Setup patching of dt_util time functions before any other Home Assistant imports
from . import patch_time # noqa: F401, isort:skip
from homeassistant import core as ha, loader, runner
from homeassistant import components, core as ha, loader, runner
from homeassistant.auth.const import GROUP_ID_ADMIN, GROUP_ID_READ_ONLY
from homeassistant.auth.models import Credentials
from homeassistant.auth.providers import homeassistant
@ -85,6 +86,7 @@ from homeassistant.helpers import (
issue_registry as ir,
label_registry as lr,
recorder as recorder_helper,
translation as translation_helper,
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.translation import _TranslationsCacheData
@ -1211,9 +1213,8 @@ def mock_get_source_ip() -> Generator[_patch]:
def translations_once() -> Generator[_patch]:
"""Only load translations once per session.
Warning: having this as a session fixture can cause issues with tests that
create mock integrations, overriding the real integration translations
with empty ones. Translations should be reset after such tests (see #131628)
Note: To avoid issues with tests that mock integrations, translations for
mocked integrations are cleaned up by the evict_faked_translations fixture.
"""
cache = _TranslationsCacheData({}, {})
patcher = patch(
@ -1227,6 +1228,30 @@ def translations_once() -> Generator[_patch]:
patcher.stop()
@pytest.fixture(autouse=True, scope="module")
def evict_faked_translations(translations_once) -> Generator[_patch]:
"""Clear translations for mocked integrations from the cache after each module."""
real_component_strings = translation_helper._async_get_component_strings
with patch(
"homeassistant.helpers.translation._async_get_component_strings",
wraps=real_component_strings,
) as mock_component_strings:
yield
cache: _TranslationsCacheData = translations_once.kwargs["return_value"]
component_paths = components.__path__
for call in mock_component_strings.mock_calls:
integrations: dict[str, loader.Integration] = call.args[3]
for domain, integration in integrations.items():
if any(
pathlib.Path(f"{component_path}/{domain}") == integration.file_path
for component_path in component_paths
):
continue
for loaded_for_lang in cache.loaded.values():
loaded_for_lang.discard(domain)
@pytest.fixture
def disable_translations_once(
translations_once: _patch,

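For context: patching with wraps= keeps the real function running while recording every call, which is how evict_faked_translations can replay mock_calls afterwards to see which integrations loaded translations. A standalone sketch (the patched target is illustrative):

# Sketch: wraps= records calls without replacing the real behavior.
from unittest.mock import patch


def real_loader(domain: str) -> str:
    return f"strings for {domain}"


with patch(f"{__name__}.real_loader", wraps=real_loader) as mock_loader:
    assert real_loader("test") == "strings for test"  # real code still runs

assert mock_loader.mock_calls[0].args == ("test",)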
View File

@ -1,6 +1,8 @@
"""Test test fixture configuration."""
from collections.abc import Generator
from http import HTTPStatus
import pathlib
import socket
from aiohttp import web
@ -9,8 +11,11 @@ import pytest_socket
from homeassistant.components.http import HomeAssistantView
from homeassistant.core import HomeAssistant, async_get_hass
from homeassistant.helpers import translation
from homeassistant.setup import async_setup_component
from .common import MockModule, mock_integration
from .conftest import evict_faked_translations
from .typing import ClientSessionGenerator
@ -70,3 +75,46 @@ async def test_aiohttp_client_frozen_router_view(
assert response.status == HTTPStatus.OK
result = await response.json()
assert result["test"] is True
async def test_evict_faked_translations_assumptions(hass: HomeAssistant) -> None:
"""Test assumptions made when detecting translations for mocked integrations.
    If this test fails, the evict_faked_translations fixture may need to be updated.
"""
integration = mock_integration(hass, MockModule("test"), built_in=True)
assert integration.file_path == pathlib.Path("")
async def test_evict_faked_translations(hass: HomeAssistant, translations_once) -> None:
"""Test the evict_faked_translations fixture."""
cache: translation._TranslationsCacheData = translations_once.kwargs["return_value"]
fake_domain = "test"
real_domain = "homeassistant"
# Evict the real domain from the cache in case it's been loaded before
cache.loaded["en"].discard(real_domain)
assert fake_domain not in cache.loaded["en"]
assert real_domain not in cache.loaded["en"]
# The evict_faked_translations fixture has module scope, so we set it up and
# tear it down manually
real_func = evict_faked_translations.__pytest_wrapped__.obj
gen: Generator = real_func(translations_once)
# Set up the evict_faked_translations fixture
next(gen)
mock_integration(hass, MockModule(fake_domain), built_in=True)
await translation.async_load_integrations(hass, {fake_domain, real_domain})
assert fake_domain in cache.loaded["en"]
assert real_domain in cache.loaded["en"]
# Tear down the evict_faked_translations fixture
with pytest.raises(StopIteration):
next(gen)
# The mock integration should be removed from the cache, the real domain should still be there
assert fake_domain not in cache.loaded["en"]
assert real_domain in cache.loaded["en"]
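For context: the test above drives the module-scoped fixture by hand because pytest would otherwise tie its lifetime to the whole module. A yield-fixture is just a generator, so setup and teardown can be stepped manually; a minimal sketch:

# Sketch: stepping a yield-fixture's generator manually.
from collections.abc import Generator


def my_fixture() -> Generator[str]:
    print("setup")
    yield "value"
    print("teardown")


gen = my_fixture()
value = next(gen)  # runs setup, returns "value"
try:
    next(gen)      # runs teardown; generator is exhausted
except StopIteration:
    pass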