Franck Nijhof 2025-05-09 17:03:40 +02:00 committed by GitHub
commit 00627b82e0
GPG Key ID: B5690EEEBB952194
35 changed files with 430 additions and 132 deletions


@@ -22,7 +22,7 @@ from . import util
 from .agent import BackupAgent
 from .const import DATA_MANAGER
 from .manager import BackupManager
-from .models import BackupNotFound
+from .models import AgentBackup, BackupNotFound

 @callback
@@ -85,7 +85,15 @@ class DownloadBackupView(HomeAssistantView):
                     request, headers, backup_id, agent_id, agent, manager
                 )
             return await self._send_backup_with_password(
-                hass, request, headers, backup_id, agent_id, password, agent, manager
+                hass,
+                backup,
+                request,
+                headers,
+                backup_id,
+                agent_id,
+                password,
+                agent,
+                manager,
             )
         except BackupNotFound:
             return Response(status=HTTPStatus.NOT_FOUND)
@@ -116,6 +124,7 @@ class DownloadBackupView(HomeAssistantView):
     async def _send_backup_with_password(
         self,
         hass: HomeAssistant,
+        backup: AgentBackup,
         request: Request,
         headers: dict[istr, str],
         backup_id: str,
@@ -144,7 +153,8 @@ class DownloadBackupView(HomeAssistantView):
         stream = util.AsyncIteratorWriter(hass)
         worker = threading.Thread(
-            target=util.decrypt_backup, args=[reader, stream, password, on_done, 0, []]
+            target=util.decrypt_backup,
+            args=[backup, reader, stream, password, on_done, 0, []],
         )
         try:
             worker.start()


@@ -295,13 +295,26 @@ def validate_password_stream(
     raise BackupEmpty


+def _get_expected_archives(backup: AgentBackup) -> set[str]:
+    """Get the expected archives in the backup."""
+    expected_archives = set()
+    if backup.homeassistant_included:
+        expected_archives.add("homeassistant")
+    for addon in backup.addons:
+        expected_archives.add(addon.slug)
+    for folder in backup.folders:
+        expected_archives.add(folder.value)
+    return expected_archives
+
+
 def decrypt_backup(
+    backup: AgentBackup,
     input_stream: IO[bytes],
     output_stream: IO[bytes],
     password: str | None,
     on_done: Callable[[Exception | None], None],
     minimum_size: int,
-    nonces: list[bytes],
+    nonces: NonceGenerator,
 ) -> None:
     """Decrypt a backup."""
     error: Exception | None = None
@@ -315,7 +328,7 @@ def decrypt_backup(
                 fileobj=output_stream, mode="w|", bufsize=BUF_SIZE
             ) as output_tar,
         ):
-            _decrypt_backup(input_tar, output_tar, password)
+            _decrypt_backup(backup, input_tar, output_tar, password)
     except (DecryptError, SecureTarError, tarfile.TarError) as err:
         LOGGER.warning("Error decrypting backup: %s", err)
         error = err
@@ -333,15 +346,18 @@ def decrypt_backup(
 def _decrypt_backup(
+    backup: AgentBackup,
     input_tar: tarfile.TarFile,
     output_tar: tarfile.TarFile,
     password: str | None,
 ) -> None:
     """Decrypt a backup."""
+    expected_archives = _get_expected_archives(backup)
     for obj in input_tar:
         # We compare with PurePath to avoid issues with different path separators,
         # for example when backup.json is added as "./backup.json"
-        if PurePath(obj.name) == PurePath("backup.json"):
+        object_path = PurePath(obj.name)
+        if object_path == PurePath("backup.json"):
             # Rewrite the backup.json file to indicate that the backup is decrypted
             if not (reader := input_tar.extractfile(obj)):
                 raise DecryptError
@@ -352,7 +368,13 @@ def _decrypt_backup(
             metadata_obj.size = len(updated_metadata_b)
             output_tar.addfile(metadata_obj, BytesIO(updated_metadata_b))
             continue
-        if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
+        prefix, _, suffix = object_path.name.partition(".")
+        if suffix not in ("tar", "tgz", "tar.gz"):
+            LOGGER.debug("Unknown file %s will not be decrypted", obj.name)
+            output_tar.addfile(obj, input_tar.extractfile(obj))
+            continue
+        if prefix not in expected_archives:
+            LOGGER.debug("Unknown inner tar file %s will not be decrypted", obj.name)
             output_tar.addfile(obj, input_tar.extractfile(obj))
             continue
         istf = SecureTarFile(
@@ -371,12 +393,13 @@ def _decrypt_backup(
 def encrypt_backup(
+    backup: AgentBackup,
     input_stream: IO[bytes],
     output_stream: IO[bytes],
     password: str | None,
     on_done: Callable[[Exception | None], None],
     minimum_size: int,
-    nonces: list[bytes],
+    nonces: NonceGenerator,
 ) -> None:
     """Encrypt a backup."""
     error: Exception | None = None
@@ -390,7 +413,7 @@ def encrypt_backup(
                 fileobj=output_stream, mode="w|", bufsize=BUF_SIZE
             ) as output_tar,
         ):
-            _encrypt_backup(input_tar, output_tar, password, nonces)
+            _encrypt_backup(backup, input_tar, output_tar, password, nonces)
     except (EncryptError, SecureTarError, tarfile.TarError) as err:
         LOGGER.warning("Error encrypting backup: %s", err)
         error = err
@@ -408,17 +431,20 @@ def encrypt_backup(
 def _encrypt_backup(
+    backup: AgentBackup,
     input_tar: tarfile.TarFile,
     output_tar: tarfile.TarFile,
     password: str | None,
-    nonces: list[bytes],
+    nonces: NonceGenerator,
 ) -> None:
     """Encrypt a backup."""
     inner_tar_idx = 0
+    expected_archives = _get_expected_archives(backup)
     for obj in input_tar:
         # We compare with PurePath to avoid issues with different path separators,
         # for example when backup.json is added as "./backup.json"
-        if PurePath(obj.name) == PurePath("backup.json"):
+        object_path = PurePath(obj.name)
+        if object_path == PurePath("backup.json"):
             # Rewrite the backup.json file to indicate that the backup is encrypted
             if not (reader := input_tar.extractfile(obj)):
                 raise EncryptError
@@ -429,16 +455,21 @@ def _encrypt_backup(
             metadata_obj.size = len(updated_metadata_b)
             output_tar.addfile(metadata_obj, BytesIO(updated_metadata_b))
             continue
-        if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
+        prefix, _, suffix = object_path.name.partition(".")
+        if suffix not in ("tar", "tgz", "tar.gz"):
+            LOGGER.debug("Unknown file %s will not be encrypted", obj.name)
             output_tar.addfile(obj, input_tar.extractfile(obj))
             continue
+        if prefix not in expected_archives:
+            LOGGER.debug("Unknown inner tar file %s will not be encrypted", obj.name)
+            continue
         istf = SecureTarFile(
             None,  # Not used
             gzip=False,
             key=password_to_key(password) if password is not None else None,
             mode="r",
             fileobj=input_tar.extractfile(obj),
-            nonce=nonces[inner_tar_idx],
+            nonce=nonces.get(inner_tar_idx),
         )
         inner_tar_idx += 1
         with istf.encrypt(obj) as encrypted:
@@ -456,17 +487,33 @@ class _CipherWorkerStatus:
     writer: AsyncIteratorWriter


+class NonceGenerator:
+    """Generate nonces for encryption."""
+
+    def __init__(self) -> None:
+        """Initialize the generator."""
+        self._nonces: dict[int, bytes] = {}
+
+    def get(self, index: int) -> bytes:
+        """Get a nonce for the given index."""
+        if index not in self._nonces:
+            # Generate a new nonce for the given index
+            self._nonces[index] = os.urandom(16)
+        return self._nonces[index]
+
+
 class _CipherBackupStreamer:
     """Encrypt or decrypt a backup."""

     _cipher_func: Callable[
         [
+            AgentBackup,
             IO[bytes],
             IO[bytes],
             str | None,
             Callable[[Exception | None], None],
             int,
-            list[bytes],
+            NonceGenerator,
         ],
         None,
     ]
@@ -484,7 +531,7 @@ class _CipherBackupStreamer:
         self._hass = hass
         self._open_stream = open_stream
         self._password = password
-        self._nonces: list[bytes] = []
+        self._nonces = NonceGenerator()

     def size(self) -> int:
         """Return the maximum size of the decrypted or encrypted backup."""
@@ -508,7 +555,15 @@ class _CipherBackupStreamer:
         writer = AsyncIteratorWriter(self._hass)
         worker = threading.Thread(
             target=self._cipher_func,
-            args=[reader, writer, self._password, on_done, self.size(), self._nonces],
+            args=[
+                self._backup,
+                reader,
+                writer,
+                self._password,
+                on_done,
+                self.size(),
+                self._nonces,
+            ],
         )
         worker_status = _CipherWorkerStatus(
             done=asyncio.Event(), reader=reader, thread=worker, writer=writer
@@ -538,17 +593,6 @@ class DecryptedBackupStreamer(_CipherBackupStreamer):
 class EncryptedBackupStreamer(_CipherBackupStreamer):
     """Encrypt a backup."""

-    def __init__(
-        self,
-        hass: HomeAssistant,
-        backup: AgentBackup,
-        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
-        password: str | None,
-    ) -> None:
-        """Initialize."""
-        super().__init__(hass, backup, open_stream, password)
-        self._nonces = [os.urandom(16) for _ in range(self._num_tar_files())]
-
     _cipher_func = staticmethod(encrypt_backup)

     def backup(self) -> AgentBackup:
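
Two things change in backup/util.py above. First, inner tar members are now only run through SecureTarFile when their name has an archive suffix and its prefix matches an archive the backup metadata says should exist; anything else is passed through (or skipped) with a debug log. Second, nonces are no longer pre-generated in EncryptedBackupStreamer.__init__ but handed out lazily and cached per inner-tar index by NonceGenerator, so every stream opened by the same streamer instance sees the same nonce for a given index. A minimal standalone sketch of that caching behavior, reusing only the class shown in the diff plus the standard library (usage names are illustrative):

import os


class NonceGenerator:
    """Generate nonces for encryption (copied from the diff above)."""

    def __init__(self) -> None:
        self._nonces: dict[int, bytes] = {}

    def get(self, index: int) -> bytes:
        """Return a cached 16-byte nonce for the given inner-tar index."""
        if index not in self._nonces:
            self._nonces[index] = os.urandom(16)
        return self._nonces[index]


gen = NonceGenerator()
assert gen.get(0) == gen.get(0)   # cached: the same index always yields the same nonce
assert len(gen.get(0)) == 16      # 16 bytes, the nonce length handed to SecureTarFile
assert gen.get(1) != gen.get(0)   # another inner tar gets its own random nonce (with overwhelming probability)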


@@ -68,7 +68,7 @@ async def async_validate_hostname(
     result = False
     with contextlib.suppress(DNSError):
         result = bool(
-            await aiodns.DNSResolver(
+            await aiodns.DNSResolver(  # type: ignore[call-overload]
                 nameservers=[resolver], udp_port=port, tcp_port=port
             ).query(hostname, qtype)
         )


@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/dnsip",
   "iot_class": "cloud_polling",
-  "requirements": ["aiodns==3.3.0"]
+  "requirements": ["aiodns==3.4.0"]
 }


@@ -106,7 +106,7 @@ class WanIpSensor(SensorEntity):
     async def async_update(self) -> None:
         """Get the current DNS IP address for hostname."""
         try:
-            response = await self.resolver.query(self.hostname, self.querytype)
+            response = await self.resolver.query(self.hostname, self.querytype)  # type: ignore[call-overload]
         except DNSError as err:
             _LOGGER.warning("Exception while resolving host: %s", err)
             response = None


@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/forecast_solar",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["forecast-solar==4.1.0"]
+  "requirements": ["forecast-solar==4.2.0"]
 }


@@ -92,7 +92,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
         available_main_ains = [
             ain
-            for ain, dev in data.devices.items()
+            for ain, dev in data.devices.items() | data.templates.items()
             if dev.device_and_unit_id[1] is None
         ]
         device_reg = dr.async_get(self.hass)


@@ -45,7 +45,15 @@ type FroniusConfigEntry = ConfigEntry[FroniusSolarNet]
 async def async_setup_entry(hass: HomeAssistant, entry: FroniusConfigEntry) -> bool:
     """Set up fronius from a config entry."""
     host = entry.data[CONF_HOST]
-    fronius = Fronius(async_get_clientsession(hass), host)
+    fronius = Fronius(
+        async_get_clientsession(
+            hass,
+            # Fronius Gen24 firmware 1.35.4-1 redirects to HTTPS with self-signed
+            # certificate. See https://github.com/home-assistant/core/issues/138881
+            verify_ssl=False,
+        ),
+        host,
+    )
     solar_net = FroniusSolarNet(hass, entry, fronius)

     await solar_net.init_devices()


@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250507.0"]
+  "requirements": ["home-assistant-frontend==20250509.0"]
 }


@@ -8,7 +8,13 @@ from pyhap.const import CATEGORY_AIR_PURIFIER
 from pyhap.service import Service
 from pyhap.util import callback as pyhap_callback

-from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN
+from homeassistant.const import (
+    ATTR_UNIT_OF_MEASUREMENT,
+    STATE_ON,
+    STATE_UNAVAILABLE,
+    STATE_UNKNOWN,
+    UnitOfTemperature,
+)
 from homeassistant.core import (
     Event,
     EventStateChangedData,
@@ -43,7 +49,12 @@ from .const import (
     THRESHOLD_FILTER_CHANGE_NEEDED,
 )
 from .type_fans import ATTR_PRESET_MODE, CHAR_ROTATION_SPEED, Fan
-from .util import cleanup_name_for_homekit, convert_to_float, density_to_air_quality
+from .util import (
+    cleanup_name_for_homekit,
+    convert_to_float,
+    density_to_air_quality,
+    temperature_to_homekit,
+)

 _LOGGER = logging.getLogger(__name__)
@@ -345,8 +356,13 @@ class AirPurifier(Fan):
         ):
             return

+        unit = new_state.attributes.get(
+            ATTR_UNIT_OF_MEASUREMENT, UnitOfTemperature.CELSIUS
+        )
+        current_temperature = temperature_to_homekit(current_temperature, unit)
+
         _LOGGER.debug(
-            "%s: Linked temperature sensor %s changed to %d",
+            "%s: Linked temperature sensor %s changed to %d °C",
             self.entity_id,
             self.linked_temperature_sensor,
             current_temperature,
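
The air purifier hunk above normalizes a linked temperature sensor's reading to Celsius before logging it and handing it to HomeKit, defaulting to Celsius when the state carries no unit attribute. A rough standalone sketch of that conversion, with plain-Python stand-ins for UnitOfTemperature and the homekit temperature_to_homekit helper (the one-decimal rounding here is an assumption chosen to match the 60 °F → 15.6 °C expectation in the updated test):

from enum import StrEnum


class UnitOfTemperature(StrEnum):
    """Stand-in for homeassistant.const.UnitOfTemperature."""

    CELSIUS = "°C"
    FAHRENHEIT = "°F"


def to_homekit_celsius(value: float, unit: str) -> float:
    """Convert a sensor reading to the Celsius value HomeKit characteristics expect."""
    if unit == UnitOfTemperature.FAHRENHEIT:
        value = (value - 32.0) * 5.0 / 9.0
    return round(value, 1)


assert to_homekit_celsius(60, UnitOfTemperature.FAHRENHEIT) == 15.6
assert to_homekit_celsius(21.5, UnitOfTemperature.CELSIUS) == 21.5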


@@ -37,5 +37,5 @@
   "iot_class": "cloud_push",
   "loggers": ["pylamarzocco"],
   "quality_scale": "platinum",
-  "requirements": ["pylamarzocco==2.0.0"]
+  "requirements": ["pylamarzocco==2.0.1"]
 }


@@ -132,17 +132,18 @@ async def async_setup_entry(
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up sensor entities."""
-    coordinator = entry.runtime_data.config_coordinator
+    config_coordinator = entry.runtime_data.config_coordinator
+    statistic_coordinators = entry.runtime_data.statistics_coordinator
     entities = [
-        LaMarzoccoSensorEntity(coordinator, description)
+        LaMarzoccoSensorEntity(config_coordinator, description)
         for description in ENTITIES
-        if description.supported_fn(coordinator)
+        if description.supported_fn(config_coordinator)
     ]
     entities.extend(
-        LaMarzoccoStatisticSensorEntity(coordinator, description)
+        LaMarzoccoStatisticSensorEntity(statistic_coordinators, description)
         for description in STATISTIC_ENTITIES
-        if description.supported_fn(coordinator)
+        if description.supported_fn(statistic_coordinators)
     )
     async_add_entities(entities)


@@ -6,7 +6,6 @@ import logging
 from typing import Any

 from pypoint import PointSession
-from tempora.utc import fromtimestamp

 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -62,7 +61,9 @@ class PointDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]
                 or device.device_id not in self.device_updates
                 or self.device_updates[device.device_id] < last_updated
             ):
-                self.device_updates[device.device_id] = last_updated or fromtimestamp(0)
+                self.device_updates[device.device_id] = (
+                    last_updated or datetime.fromtimestamp(0)
+                )
                 self.data[device.device_id] = {
                     k: await device.sensor(k)
                     for k in ("temperature", "humidity", "sound_pressure")


@@ -23,7 +23,7 @@ from homeassistant.helpers import (
     device_registry as dr,
     entity_registry as er,
 )
-from homeassistant.helpers.device_registry import format_mac
+from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
 from homeassistant.helpers.event import async_call_later
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -380,6 +380,14 @@ def migrate_entity_ids(
         if ch is None or is_chime:
             continue  # Do not consider the NVR itself or chimes

+        # Check for wrongfully added MAC of the NVR/Hub to the camera
+        # Can be removed in HA 2025.12
+        host_connnection = (CONNECTION_NETWORK_MAC, host.api.mac_address)
+        if host_connnection in device.connections:
+            new_connections = device.connections.copy()
+            new_connections.remove(host_connnection)
+            device_reg.async_update_device(device.id, new_connections=new_connections)
+
         ch_device_ids[device.id] = ch
         if host.api.supported(ch, "UID") and device_uid[1] != host.api.camera_uid(ch):
             if host.api.supported(None, "UID"):


@@ -97,6 +97,7 @@ SKU_TO_BASE_DEVICE = {
     "LAP-V102S-AASR": "Vital100S",  # Alt ID Model Vital100S
     "LAP-V102S-WEU": "Vital100S",  # Alt ID Model Vital100S
     "LAP-V102S-WUK": "Vital100S",  # Alt ID Model Vital100S
+    "LAP-V102S-AUSR": "Vital100S",  # Alt ID Model Vital100S
     "EverestAir": "EverestAir",
     "LAP-EL551S-AUS": "EverestAir",  # Alt ID Model EverestAir
     "LAP-EL551S-AEUR": "EverestAir",  # Alt ID Model EverestAir


@@ -88,6 +88,7 @@ from .const import (
     DATA_CLIENT,
     DOMAIN,
     EVENT_DEVICE_ADDED_TO_REGISTRY,
+    RESTORE_NVM_DRIVER_READY_TIMEOUT,
     USER_AGENT,
 )
 from .helpers import (
@@ -3063,14 +3064,28 @@ async def websocket_restore_nvm(
             )
         )

+    @callback
+    def set_driver_ready(event: dict) -> None:
+        "Set the driver ready event."
+        wait_driver_ready.set()
+
+    wait_driver_ready = asyncio.Event()
+
     # Set up subscription for progress events
     connection.subscriptions[msg["id"]] = async_cleanup
     msg[DATA_UNSUBSCRIBE] = unsubs = [
         controller.on("nvm convert progress", forward_progress),
         controller.on("nvm restore progress", forward_progress),
+        driver.once("driver ready", set_driver_ready),
     ]
     await controller.async_restore_nvm_base64(msg["data"])
+
+    with suppress(TimeoutError):
+        async with asyncio.timeout(RESTORE_NVM_DRIVER_READY_TIMEOUT):
+            await wait_driver_ready.wait()
+
+    await hass.config_entries.async_reload(entry.entry_id)
+
     connection.send_message(
         websocket_api.event_message(
             msg[ID],
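
The websocket handler above now listens for the driver's "driver ready" event after the NVM restore, waits for it at most RESTORE_NVM_DRIVER_READY_TIMEOUT seconds, and reloads the config entry either way. A minimal sketch of that wait-with-timeout shape in isolation (hypothetical helper names, not the Home Assistant or zwave-js-server API):

import asyncio
from contextlib import suppress

RESTORE_NVM_DRIVER_READY_TIMEOUT = 60  # seconds, the constant this commit moves into zwave_js const.py


async def wait_for_driver_ready(driver_ready: asyncio.Event) -> None:
    """Wait for the driver-ready signal, but never block longer than the timeout."""
    with suppress(TimeoutError):
        async with asyncio.timeout(RESTORE_NVM_DRIVER_READY_TIMEOUT):
            await driver_ready.wait()
    # Whether or not the event fired in time, the caller proceeds to reload the entry.


async def _demo() -> None:
    ready = asyncio.Event()
    # Simulate the driver coming back shortly after the restore completes.
    asyncio.get_running_loop().call_later(0.1, ready.set)
    await wait_for_driver_ready(ready)
    print("reloading config entry now")


asyncio.run(_demo())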


@@ -67,6 +67,7 @@ from .const import (
     CONF_USE_ADDON,
     DATA_CLIENT,
     DOMAIN,
+    RESTORE_NVM_DRIVER_READY_TIMEOUT,
 )

 _LOGGER = logging.getLogger(__name__)
@@ -78,7 +79,6 @@ ADDON_SETUP_TIMEOUT = 5
 ADDON_SETUP_TIMEOUT_ROUNDS = 40
 CONF_EMULATE_HARDWARE = "emulate_hardware"
 CONF_LOG_LEVEL = "log_level"
-RESTORE_NVM_DRIVER_READY_TIMEOUT = 60
 SERVER_VERSION_TIMEOUT = 10

 ADDON_LOG_LEVELS = {


@@ -201,3 +201,7 @@ COVER_TILT_PROPERTY_KEYS: set[str | int | None] = {
     WindowCoveringPropertyKey.VERTICAL_SLATS_ANGLE,
     WindowCoveringPropertyKey.VERTICAL_SLATS_ANGLE_NO_POSITION,
 }
+
+# Other constants
+
+RESTORE_NVM_DRIVER_READY_TIMEOUT = 60


@@ -1204,7 +1204,7 @@ DISCOVERY_SCHEMAS = [
             property={RESET_METER_PROPERTY},
             type={ValueType.BOOLEAN},
         ),
-        entity_category=EntityCategory.DIAGNOSTIC,
+        entity_category=EntityCategory.CONFIG,
     ),
     ZWaveDiscoverySchema(
         platform=Platform.BINARY_SENSOR,


@@ -25,7 +25,7 @@ if TYPE_CHECKING:
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2025
 MINOR_VERSION: Final = 5
-PATCH_VERSION: Final = "0"
+PATCH_VERSION: Final = "1"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)


@@ -575,8 +575,10 @@ class DeviceRegistryItems[_EntryTypeT: (DeviceEntry, DeletedDeviceEntry)](
         """Unindex an entry."""
         old_entry = self.data[key]
         for connection in old_entry.connections:
+            if connection in self._connections:
                 del self._connections[connection]
         for identifier in old_entry.identifiers:
+            if identifier in self._identifiers:
                 del self._identifiers[identifier]

     def get_entry(
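
The device-registry hunk above guards the index cleanup so that un-indexing an entry whose connection or identifier is no longer in the index is a no-op instead of a KeyError, for example when two entries were registered with the same connection tuple and only one of them occupies the index slot. A tiny self-contained illustration of the guarded pattern (a plain dict standing in for the registry's index; names are illustrative):

# connection tuple -> entry id, a simplified stand-in for DeviceRegistryItems._connections
index: dict[tuple[str, str], str] = {("mac", "aa:bb:cc:dd:ee:ff"): "entry_1"}

# Two entries carried the same connection, so the second cleanup pass finds nothing to remove.
for connection in [("mac", "aa:bb:cc:dd:ee:ff"), ("mac", "aa:bb:cc:dd:ee:ff")]:
    if connection in index:
        del index[connection]  # guarded: idempotent instead of raising KeyError

assert not index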


@@ -2,7 +2,7 @@
 aiodhcpwatcher==1.1.1
 aiodiscover==2.6.1
-aiodns==3.3.0
+aiodns==3.4.0
 aiohasupervisor==0.3.1
 aiohttp-asyncmdnsresolver==0.1.1
 aiohttp-fast-zlib==0.2.3
@@ -38,7 +38,7 @@ habluetooth==3.48.2
 hass-nabucasa==0.96.0
 hassil==2.2.3
 home-assistant-bluetooth==1.13.1
-home-assistant-frontend==20250507.0
+home-assistant-frontend==20250509.0
 home-assistant-intents==2025.5.7
 httpx==0.28.1
 ifaddr==0.2.0


@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 [project]
 name = "homeassistant"
-version = "2025.5.0"
+version = "2025.5.1"
 license = "Apache-2.0"
 license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
 description = "Open-source home automation platform running on Python 3."
@@ -23,7 +23,7 @@ classifiers = [
 ]
 requires-python = ">=3.13.2"
 dependencies = [
-    "aiodns==3.3.0",
+    "aiodns==3.4.0",
     # Integrations may depend on hassio integration without listing it to
     # change behavior based on presence of supervisor. Deprecated with #127228
     # Lib can be removed with 2025.11

requirements.txt (generated)

@@ -3,7 +3,7 @@
 -c homeassistant/package_constraints.txt

 # Home Assistant Core
-aiodns==3.3.0
+aiodns==3.4.0
 aiohasupervisor==0.3.1
 aiohttp==3.11.18
 aiohttp_cors==0.7.0

requirements_all.txt (generated)

@@ -223,7 +223,7 @@ aiodhcpwatcher==1.1.1
 aiodiscover==2.6.1

 # homeassistant.components.dnsip
-aiodns==3.3.0
+aiodns==3.4.0

 # homeassistant.components.duke_energy
 aiodukeenergy==0.3.0
@@ -958,7 +958,7 @@ fnv-hash-fast==1.5.0
 foobot_async==1.0.0

 # homeassistant.components.forecast_solar
-forecast-solar==4.1.0
+forecast-solar==4.2.0

 # homeassistant.components.fortios
 fortiosapi==1.0.5
@@ -1161,7 +1161,7 @@ hole==0.8.0
 holidays==0.70

 # homeassistant.components.frontend
-home-assistant-frontend==20250507.0
+home-assistant-frontend==20250509.0

 # homeassistant.components.conversation
 home-assistant-intents==2025.5.7
@@ -2093,7 +2093,7 @@ pykwb==0.0.8
 pylacrosse==0.4

 # homeassistant.components.lamarzocco
-pylamarzocco==2.0.0
+pylamarzocco==2.0.1

 # homeassistant.components.lastfm
 pylast==5.1.0


@@ -211,7 +211,7 @@ aiodhcpwatcher==1.1.1
 aiodiscover==2.6.1

 # homeassistant.components.dnsip
-aiodns==3.3.0
+aiodns==3.4.0

 # homeassistant.components.duke_energy
 aiodukeenergy==0.3.0
@@ -818,7 +818,7 @@ fnv-hash-fast==1.5.0
 foobot_async==1.0.0

 # homeassistant.components.forecast_solar
-forecast-solar==4.1.0
+forecast-solar==4.2.0

 # homeassistant.components.freebox
 freebox-api==1.2.2
@@ -991,7 +991,7 @@ hole==0.8.0
 holidays==0.70

 # homeassistant.components.frontend
-home-assistant-frontend==20250507.0
+home-assistant-frontend==20250509.0

 # homeassistant.components.conversation
 home-assistant-intents==2025.5.7
@@ -1708,7 +1708,7 @@ pykrakenapi==0.1.8
 pykulersky==0.5.8

 # homeassistant.components.lamarzocco
-pylamarzocco==2.0.0
+pylamarzocco==2.0.1

 # homeassistant.components.lastfm
 pylast==5.1.0


@@ -177,7 +177,7 @@ async def _test_downloading_encrypted_backup(
     enc_metadata = json.loads(outer_tar.extractfile("./backup.json").read())
     assert enc_metadata["protected"] is True
     with (
-        outer_tar.extractfile("core.tar.gz") as inner_tar_file,
+        outer_tar.extractfile("homeassistant.tar.gz") as inner_tar_file,
         pytest.raises(tarfile.ReadError, match="file could not be opened"),
     ):
         # pylint: disable-next=consider-using-with
@@ -209,7 +209,7 @@ async def _test_downloading_encrypted_backup(
     dec_metadata = json.loads(outer_tar.extractfile("./backup.json").read())
     assert dec_metadata == enc_metadata | {"protected": False}
     with (
-        outer_tar.extractfile("core.tar.gz") as inner_tar_file,
+        outer_tar.extractfile("homeassistant.tar.gz") as inner_tar_file,
         tarfile.open(fileobj=inner_tar_file, mode="r") as inner_tar,
     ):
         assert inner_tar.getnames() == [


@@ -174,7 +174,10 @@ async def test_decrypted_backup_streamer(hass: HomeAssistant) -> None:
     )
     encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
     backup = AgentBackup(
-        addons=["addon_1", "addon_2"],
+        addons=[
+            AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
+            AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
+        ],
         backup_id="1234",
         date="2024-12-02T07:23:58.261875-05:00",
         database_included=False,
@@ -218,7 +221,10 @@ async def test_decrypted_backup_streamer_interrupt_stuck_reader(
     """Test the decrypted backup streamer."""
     encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
     backup = AgentBackup(
-        addons=["addon_1", "addon_2"],
+        addons=[
+            AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
+            AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
+        ],
         backup_id="1234",
         date="2024-12-02T07:23:58.261875-05:00",
         database_included=False,
@@ -253,7 +259,10 @@ async def test_decrypted_backup_streamer_interrupt_stuck_writer(
     """Test the decrypted backup streamer."""
     encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
     backup = AgentBackup(
-        addons=["addon_1", "addon_2"],
+        addons=[
+            AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
+            AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
+        ],
         backup_id="1234",
         date="2024-12-02T07:23:58.261875-05:00",
         database_included=False,
@@ -283,7 +292,10 @@ async def test_decrypted_backup_streamer_wrong_password(hass: HomeAssistant) ->
     """Test the decrypted backup streamer with wrong password."""
     encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
     backup = AgentBackup(
-        addons=["addon_1", "addon_2"],
+        addons=[
+            AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
+            AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
+        ],
         backup_id="1234",
         date="2024-12-02T07:23:58.261875-05:00",
         database_included=False,
@@ -320,7 +332,10 @@ async def test_encrypted_backup_streamer(hass: HomeAssistant) -> None:
     )
     encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
     backup = AgentBackup(
-        addons=["addon_1", "addon_2"],
+        addons=[
+            AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
+            AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
+        ],
         backup_id="1234",
         date="2024-12-02T07:23:58.261875-05:00",
         database_included=False,
@@ -353,6 +368,7 @@ async def test_encrypted_backup_streamer(hass: HomeAssistant) -> None:
             bytes.fromhex("00000000000000000000000000000000"),
         )
+    encryptor = EncryptedBackupStreamer(hass, backup, open_backup, "hunter2")
     assert encryptor.backup() == dataclasses.replace(
         backup, protected=True, size=backup.size + len(expected_padding)
     )
@@ -377,7 +393,10 @@ async def test_encrypted_backup_streamer_interrupt_stuck_reader(
         "test_backups/c0cb53bd.tar.decrypted", DOMAIN
     )
     backup = AgentBackup(
-        addons=["addon_1", "addon_2"],
+        addons=[
+            AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
+            AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
+        ],
         backup_id="1234",
         date="2024-12-02T07:23:58.261875-05:00",
         database_included=False,
@@ -414,7 +433,10 @@ async def test_encrypted_backup_streamer_interrupt_stuck_writer(
         "test_backups/c0cb53bd.tar.decrypted", DOMAIN
     )
     backup = AgentBackup(
-        addons=["addon_1", "addon_2"],
+        addons=[
+            AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
+            AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
+        ],
         backup_id="1234",
         date="2024-12-02T07:23:58.261875-05:00",
         database_included=False,
@@ -447,7 +469,10 @@ async def test_encrypted_backup_streamer_random_nonce(hass: HomeAssistant) -> None:
     )
     encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
     backup = AgentBackup(
-        addons=["addon_1", "addon_2"],
+        addons=[
+            AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
+            AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
+        ],
         backup_id="1234",
         date="2024-12-02T07:23:58.261875-05:00",
         database_included=False,
@@ -490,7 +515,7 @@
     await encryptor1.wait()
     await encryptor2.wait()

-    # Output from the two streames should differ but have the same length.
+    # Output from the two streams should differ but have the same length.
     assert encrypted_output1 != encrypted_output3
     assert len(encrypted_output1) == len(encrypted_output3)
@@ -508,7 +533,10 @@ async def test_encrypted_backup_streamer_error(hass: HomeAssistant) -> None:
         "test_backups/c0cb53bd.tar.decrypted", DOMAIN
     )
     backup = AgentBackup(
-        addons=["addon_1", "addon_2"],
+        addons=[
+            AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
+            AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
+        ],
         backup_id="1234",
         date="2024-12-02T07:23:58.261875-05:00",
         database_included=False,


@@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant
 from homeassistant.helpers import device_registry as dr, entity_registry as er
 from homeassistant.util.dt import utcnow

-from . import FritzDeviceCoverMock, FritzDeviceSwitchMock
+from . import FritzDeviceCoverMock, FritzDeviceSwitchMock, FritzEntityBaseMock
 from .const import MOCK_CONFIG

 from tests.common import MockConfigEntry, async_fire_time_changed
@@ -84,6 +84,8 @@ async def test_coordinator_automatic_registry_cleanup(
     entity_registry: er.EntityRegistry,
 ) -> None:
     """Test automatic registry cleanup."""
+
+    # init with 2 devices and 1 template
     fritz().get_devices.return_value = [
         FritzDeviceSwitchMock(
             ain="fake ain switch",
@@ -96,6 +98,13 @@ async def test_coordinator_automatic_registry_cleanup(
             name="fake_cover",
         ),
     ]
+    fritz().get_templates.return_value = [
+        FritzEntityBaseMock(
+            ain="fake ain template",
+            device_and_unit_id=("fake ain template", None),
+            name="fake_template",
+        )
+    ]

     entry = MockConfigEntry(
         domain=FB_DOMAIN,
         data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0],
@@ -105,9 +114,10 @@
     await hass.config_entries.async_setup(entry.entry_id)
     await hass.async_block_till_done(wait_background_tasks=True)

-    assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 19
-    assert len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) == 2
+    assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 20
+    assert len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) == 3

+    # remove one device, keep the template
     fritz().get_devices.return_value = [
         FritzDeviceSwitchMock(
             ain="fake ain switch",
@@ -119,5 +129,14 @@
     async_fire_time_changed(hass, utcnow() + timedelta(seconds=35))
     await hass.async_block_till_done(wait_background_tasks=True)

+    assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 13
+    assert len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) == 2
+
+    # remove the template, keep the device
+    fritz().get_templates.return_value = []
+    async_fire_time_changed(hass, utcnow() + timedelta(seconds=35))
+    await hass.async_block_till_done(wait_background_tasks=True)
+
     assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 12
     assert len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) == 1


@@ -34,9 +34,11 @@ from homeassistant.const import (
     ATTR_DEVICE_CLASS,
     ATTR_ENTITY_ID,
     ATTR_SUPPORTED_FEATURES,
+    ATTR_UNIT_OF_MEASUREMENT,
     STATE_OFF,
     STATE_ON,
     STATE_UNAVAILABLE,
+    UnitOfTemperature,
 )
 from homeassistant.core import Event, HomeAssistant
@@ -437,6 +439,22 @@ async def test_expose_linked_sensors(
     assert acc.char_air_quality.value == 1
     assert len(broker.mock_calls) == 0

+    # Updated temperature with different unit should reflect in HomeKit
+    broker = MagicMock()
+    acc.char_current_temperature.broker = broker
+    hass.states.async_set(
+        temperature_entity_id,
+        60,
+        {
+            ATTR_DEVICE_CLASS: SensorDeviceClass.TEMPERATURE,
+            ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.FAHRENHEIT,
+        },
+    )
+    await hass.async_block_till_done()
+    assert acc.char_current_temperature.value == 15.6
+    assert len(broker.mock_calls) == 2
+    broker.reset_mock()
+
     # Updated temperature should reflect in HomeKit
     broker = MagicMock()
     acc.char_current_temperature.broker = broker


@@ -39,7 +39,7 @@ from homeassistant.helpers import (
     entity_registry as er,
     issue_registry as ir,
 )
-from homeassistant.helpers.device_registry import format_mac
+from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
 from homeassistant.setup import async_setup_component

 from .conftest import (
@@ -51,6 +51,7 @@ from .conftest import (
     TEST_HOST,
     TEST_HOST_MODEL,
     TEST_MAC,
+    TEST_MAC_CAM,
     TEST_NVR_NAME,
     TEST_PORT,
     TEST_PRIVACY,
@@ -614,6 +615,55 @@
     assert entity_registry.async_get_entity_id(domain, DOMAIN, new_id)


+async def test_cleanup_mac_connection(
+    hass: HomeAssistant,
+    config_entry: MockConfigEntry,
+    reolink_connect: MagicMock,
+    entity_registry: er.EntityRegistry,
+    device_registry: dr.DeviceRegistry,
+) -> None:
+    """Test cleanup of the MAC of a IPC which was set to the MAC of the host."""
+    reolink_connect.channels = [0]
+    reolink_connect.baichuan.mac_address.return_value = None
+    entity_id = f"{TEST_UID}_{TEST_UID_CAM}_record_audio"
+    dev_id = f"{TEST_UID}_{TEST_UID_CAM}"
+    domain = Platform.SWITCH
+
+    dev_entry = device_registry.async_get_or_create(
+        identifiers={(DOMAIN, dev_id)},
+        connections={(CONNECTION_NETWORK_MAC, TEST_MAC)},
+        config_entry_id=config_entry.entry_id,
+        disabled_by=None,
+    )
+
+    entity_registry.async_get_or_create(
+        domain=domain,
+        platform=DOMAIN,
+        unique_id=entity_id,
+        config_entry=config_entry,
+        suggested_object_id=entity_id,
+        disabled_by=None,
+        device_id=dev_entry.id,
+    )
+
+    assert entity_registry.async_get_entity_id(domain, DOMAIN, entity_id)
+    device = device_registry.async_get_device(identifiers={(DOMAIN, dev_id)})
+    assert device
+    assert device.connections == {(CONNECTION_NETWORK_MAC, TEST_MAC)}
+
+    # setup CH 0 and host entities/device
+    with patch("homeassistant.components.reolink.PLATFORMS", [domain]):
+        assert await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
+
+    assert entity_registry.async_get_entity_id(domain, DOMAIN, entity_id)
+    device = device_registry.async_get_device(identifiers={(DOMAIN, dev_id)})
+    assert device
+    assert device.connections == set()
+
+    reolink_connect.baichuan.mac_address.return_value = TEST_MAC_CAM
+
+
 async def test_no_repair_issue(
     hass: HomeAssistant, config_entry: MockConfigEntry, issue_registry: ir.IssueRegistry
 ) -> None:


@@ -5518,10 +5518,18 @@ async def test_restore_nvm(
     # Set up mocks for the controller events
     controller = client.driver.controller

-    # Test restore success
-    with patch.object(
-        controller, "async_restore_nvm_base64", return_value=None
-    ) as mock_restore:
+    async def async_send_command_driver_ready(
+        message: dict[str, Any],
+        require_schema: int | None = None,
+    ) -> dict:
+        """Send a command and get a response."""
+        client.driver.emit(
+            "driver ready", {"event": "driver ready", "source": "driver"}
+        )
+        return {}
+
+    client.async_send_command.side_effect = async_send_command_driver_ready
+
     # Send the subscription request
     await ws_client.send_json_auto_id(
         {
@@ -5572,17 +5580,75 @@ async def test_restore_nvm(
     assert msg["event"]["bytesWritten"] == 50
     assert msg["event"]["total"] == 100

+    # Wait for the restore to complete
     await hass.async_block_till_done()

     # Verify the restore was called
-    assert mock_restore.called
+    # The first call is the relevant one for nvm restore.
+    assert client.async_send_command.call_count == 3
+    assert client.async_send_command.call_args_list[0] == call(
+        {
+            "command": "controller.restore_nvm",
+            "nvmData": "dGVzdA==",
+        },
+        require_schema=14,
+    )
+
+    client.async_send_command.reset_mock()
+
+    # Test sending command with driver not ready and timeout.
+    async def async_send_command_no_driver_ready(
+        message: dict[str, Any],
+        require_schema: int | None = None,
+    ) -> dict:
+        """Send a command and get a response."""
+        return {}
+
+    client.async_send_command.side_effect = async_send_command_no_driver_ready
+
+    with patch(
+        "homeassistant.components.zwave_js.api.RESTORE_NVM_DRIVER_READY_TIMEOUT",
+        new=0,
+    ):
+        # Send the subscription request
+        await ws_client.send_json_auto_id(
+            {
+                "type": "zwave_js/restore_nvm",
+                "entry_id": integration.entry_id,
+                "data": "dGVzdA==",  # base64 encoded "test"
+            }
+        )
+
+        # Verify the finished event first
+        msg = await ws_client.receive_json()
+        assert msg["type"] == "event"
+        assert msg["event"]["event"] == "finished"
+
+        # Verify subscription success
+        msg = await ws_client.receive_json()
+        assert msg["type"] == "result"
+        assert msg["success"] is True
+
+    await hass.async_block_till_done()
+
+    # Verify the restore was called
+    # The first call is the relevant one for nvm restore.
+    assert client.async_send_command.call_count == 3
+    assert client.async_send_command.call_args_list[0] == call(
+        {
+            "command": "controller.restore_nvm",
+            "nvmData": "dGVzdA==",
+        },
+        require_schema=14,
+    )
+
+    client.async_send_command.reset_mock()
+
     # Test restore failure
-    with patch.object(
-        controller,
-        "async_restore_nvm_base64",
-        side_effect=FailedCommand("failed_command", "Restore failed"),
+    with patch(
+        f"{CONTROLLER_PATCH_PREFIX}.async_restore_nvm_base64",
+        side_effect=FailedZWaveCommand("failed_command", 1, "error message"),
     ):
         # Send the subscription request
         await ws_client.send_json_auto_id(
@@ -5596,7 +5662,7 @@ async def test_restore_nvm(
     # Verify error response
     msg = await ws_client.receive_json()
     assert not msg["success"]
-    assert msg["error"]["code"] == "Restore failed"
+    assert msg["error"]["code"] == "zwave_error"

     # Test entry_id not found
     await ws_client.send_json_auto_id(


@@ -431,10 +431,11 @@ async def test_rediscovery(
 async def test_aeotec_smart_switch_7(
     hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
     aeotec_smart_switch_7: Node,
     integration: MockConfigEntry,
 ) -> None:
-    """Test that Smart Switch 7 has a light and a switch entity."""
+    """Test Smart Switch 7 discovery."""
     state = hass.states.get("light.smart_switch_7")
     assert state
     assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [
@@ -443,3 +444,9 @@ async def test_aeotec_smart_switch_7(
     state = hass.states.get("switch.smart_switch_7")
     assert state
+
+    state = hass.states.get("button.smart_switch_7_reset_accumulated_values")
+    assert state
+
+    entity_entry = entity_registry.async_get(state.entity_id)
+    assert entity_entry
+    assert entity_entry.entity_category is EntityCategory.CONFIG