Franck Nijhof 2025-05-09 17:03:40 +02:00 committed by GitHub
commit 00627b82e0
35 changed files with 430 additions and 132 deletions


@ -22,7 +22,7 @@ from . import util
from .agent import BackupAgent
from .const import DATA_MANAGER
from .manager import BackupManager
from .models import BackupNotFound
from .models import AgentBackup, BackupNotFound
@callback
@ -85,7 +85,15 @@ class DownloadBackupView(HomeAssistantView):
request, headers, backup_id, agent_id, agent, manager
)
return await self._send_backup_with_password(
hass, request, headers, backup_id, agent_id, password, agent, manager
hass,
backup,
request,
headers,
backup_id,
agent_id,
password,
agent,
manager,
)
except BackupNotFound:
return Response(status=HTTPStatus.NOT_FOUND)
@ -116,6 +124,7 @@ class DownloadBackupView(HomeAssistantView):
async def _send_backup_with_password(
self,
hass: HomeAssistant,
backup: AgentBackup,
request: Request,
headers: dict[istr, str],
backup_id: str,
@ -144,7 +153,8 @@ class DownloadBackupView(HomeAssistantView):
stream = util.AsyncIteratorWriter(hass)
worker = threading.Thread(
target=util.decrypt_backup, args=[reader, stream, password, on_done, 0, []]
target=util.decrypt_backup,
args=[backup, reader, stream, password, on_done, 0, []],
)
try:
worker.start()


@ -295,13 +295,26 @@ def validate_password_stream(
raise BackupEmpty
def _get_expected_archives(backup: AgentBackup) -> set[str]:
"""Get the expected archives in the backup."""
expected_archives = set()
if backup.homeassistant_included:
expected_archives.add("homeassistant")
for addon in backup.addons:
expected_archives.add(addon.slug)
for folder in backup.folders:
expected_archives.add(folder.value)
return expected_archives
def decrypt_backup(
backup: AgentBackup,
input_stream: IO[bytes],
output_stream: IO[bytes],
password: str | None,
on_done: Callable[[Exception | None], None],
minimum_size: int,
nonces: list[bytes],
nonces: NonceGenerator,
) -> None:
"""Decrypt a backup."""
error: Exception | None = None
@ -315,7 +328,7 @@ def decrypt_backup(
fileobj=output_stream, mode="w|", bufsize=BUF_SIZE
) as output_tar,
):
_decrypt_backup(input_tar, output_tar, password)
_decrypt_backup(backup, input_tar, output_tar, password)
except (DecryptError, SecureTarError, tarfile.TarError) as err:
LOGGER.warning("Error decrypting backup: %s", err)
error = err
@ -333,15 +346,18 @@ def decrypt_backup(
def _decrypt_backup(
backup: AgentBackup,
input_tar: tarfile.TarFile,
output_tar: tarfile.TarFile,
password: str | None,
) -> None:
"""Decrypt a backup."""
expected_archives = _get_expected_archives(backup)
for obj in input_tar:
# We compare with PurePath to avoid issues with different path separators,
# for example when backup.json is added as "./backup.json"
if PurePath(obj.name) == PurePath("backup.json"):
object_path = PurePath(obj.name)
if object_path == PurePath("backup.json"):
# Rewrite the backup.json file to indicate that the backup is decrypted
if not (reader := input_tar.extractfile(obj)):
raise DecryptError
@ -352,7 +368,13 @@ def _decrypt_backup(
metadata_obj.size = len(updated_metadata_b)
output_tar.addfile(metadata_obj, BytesIO(updated_metadata_b))
continue
if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
prefix, _, suffix = object_path.name.partition(".")
if suffix not in ("tar", "tgz", "tar.gz"):
LOGGER.debug("Unknown file %s will not be decrypted", obj.name)
output_tar.addfile(obj, input_tar.extractfile(obj))
continue
if prefix not in expected_archives:
LOGGER.debug("Unknown inner tar file %s will not be decrypted", obj.name)
output_tar.addfile(obj, input_tar.extractfile(obj))
continue
istf = SecureTarFile(
@ -371,12 +393,13 @@ def _decrypt_backup(
def encrypt_backup(
backup: AgentBackup,
input_stream: IO[bytes],
output_stream: IO[bytes],
password: str | None,
on_done: Callable[[Exception | None], None],
minimum_size: int,
nonces: list[bytes],
nonces: NonceGenerator,
) -> None:
"""Encrypt a backup."""
error: Exception | None = None
@ -390,7 +413,7 @@ def encrypt_backup(
fileobj=output_stream, mode="w|", bufsize=BUF_SIZE
) as output_tar,
):
_encrypt_backup(input_tar, output_tar, password, nonces)
_encrypt_backup(backup, input_tar, output_tar, password, nonces)
except (EncryptError, SecureTarError, tarfile.TarError) as err:
LOGGER.warning("Error encrypting backup: %s", err)
error = err
@ -408,17 +431,20 @@ def encrypt_backup(
def _encrypt_backup(
backup: AgentBackup,
input_tar: tarfile.TarFile,
output_tar: tarfile.TarFile,
password: str | None,
nonces: list[bytes],
nonces: NonceGenerator,
) -> None:
"""Encrypt a backup."""
inner_tar_idx = 0
expected_archives = _get_expected_archives(backup)
for obj in input_tar:
# We compare with PurePath to avoid issues with different path separators,
# for example when backup.json is added as "./backup.json"
if PurePath(obj.name) == PurePath("backup.json"):
object_path = PurePath(obj.name)
if object_path == PurePath("backup.json"):
# Rewrite the backup.json file to indicate that the backup is encrypted
if not (reader := input_tar.extractfile(obj)):
raise EncryptError
@ -429,16 +455,21 @@ def _encrypt_backup(
metadata_obj.size = len(updated_metadata_b)
output_tar.addfile(metadata_obj, BytesIO(updated_metadata_b))
continue
if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
prefix, _, suffix = object_path.name.partition(".")
if suffix not in ("tar", "tgz", "tar.gz"):
LOGGER.debug("Unknown file %s will not be encrypted", obj.name)
output_tar.addfile(obj, input_tar.extractfile(obj))
continue
if prefix not in expected_archives:
LOGGER.debug("Unknown inner tar file %s will not be encrypted", obj.name)
continue
istf = SecureTarFile(
None, # Not used
gzip=False,
key=password_to_key(password) if password is not None else None,
mode="r",
fileobj=input_tar.extractfile(obj),
nonce=nonces[inner_tar_idx],
nonce=nonces.get(inner_tar_idx),
)
inner_tar_idx += 1
with istf.encrypt(obj) as encrypted:
@ -456,17 +487,33 @@ class _CipherWorkerStatus:
writer: AsyncIteratorWriter
class NonceGenerator:
"""Generate nonces for encryption."""
def __init__(self) -> None:
"""Initialize the generator."""
self._nonces: dict[int, bytes] = {}
def get(self, index: int) -> bytes:
"""Get a nonce for the given index."""
if index not in self._nonces:
# Generate a new nonce for the given index
self._nonces[index] = os.urandom(16)
return self._nonces[index]
class _CipherBackupStreamer:
"""Encrypt or decrypt a backup."""
_cipher_func: Callable[
[
AgentBackup,
IO[bytes],
IO[bytes],
str | None,
Callable[[Exception | None], None],
int,
list[bytes],
NonceGenerator,
],
None,
]
@ -484,7 +531,7 @@ class _CipherBackupStreamer:
self._hass = hass
self._open_stream = open_stream
self._password = password
self._nonces: list[bytes] = []
self._nonces = NonceGenerator()
def size(self) -> int:
"""Return the maximum size of the decrypted or encrypted backup."""
@ -508,7 +555,15 @@ class _CipherBackupStreamer:
writer = AsyncIteratorWriter(self._hass)
worker = threading.Thread(
target=self._cipher_func,
args=[reader, writer, self._password, on_done, self.size(), self._nonces],
args=[
self._backup,
reader,
writer,
self._password,
on_done,
self.size(),
self._nonces,
],
)
worker_status = _CipherWorkerStatus(
done=asyncio.Event(), reader=reader, thread=worker, writer=writer
@ -538,17 +593,6 @@ class DecryptedBackupStreamer(_CipherBackupStreamer):
class EncryptedBackupStreamer(_CipherBackupStreamer):
"""Encrypt a backup."""
def __init__(
self,
hass: HomeAssistant,
backup: AgentBackup,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
password: str | None,
) -> None:
"""Initialize."""
super().__init__(hass, backup, open_stream, password)
self._nonces = [os.urandom(16) for _ in range(self._num_tar_files())]
_cipher_func = staticmethod(encrypt_backup)
def backup(self) -> AgentBackup:
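Note on the new NonceGenerator (a standalone sketch mirroring the class added above, not additional changed code): nonces are created lazily per inner-tar index and cached, so streaming the same backup repeatedly through one streamer reuses identical nonces, while separate streamer instances get fresh ones, which is the property the random-nonce test further down relies on.

import os

class NonceGenerator:
    """Generate and cache one 16-byte nonce per inner tar index."""

    def __init__(self) -> None:
        self._nonces: dict[int, bytes] = {}

    def get(self, index: int) -> bytes:
        if index not in self._nonces:
            # Generate a new nonce the first time this index is requested
            self._nonces[index] = os.urandom(16)
        return self._nonces[index]

gen = NonceGenerator()
assert gen.get(0) == gen.get(0)  # stable within one generator
assert gen.get(0) != NonceGenerator().get(0)  # fresh per generator (with overwhelming probability)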


@ -68,7 +68,7 @@ async def async_validate_hostname(
result = False
with contextlib.suppress(DNSError):
result = bool(
await aiodns.DNSResolver(
await aiodns.DNSResolver( # type: ignore[call-overload]
nameservers=[resolver], udp_port=port, tcp_port=port
).query(hostname, qtype)
)


@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/dnsip",
"iot_class": "cloud_polling",
"requirements": ["aiodns==3.3.0"]
"requirements": ["aiodns==3.4.0"]
}


@ -106,7 +106,7 @@ class WanIpSensor(SensorEntity):
async def async_update(self) -> None:
"""Get the current DNS IP address for hostname."""
try:
response = await self.resolver.query(self.hostname, self.querytype)
response = await self.resolver.query(self.hostname, self.querytype) # type: ignore[call-overload]
except DNSError as err:
_LOGGER.warning("Exception while resolving host: %s", err)
response = None


@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/forecast_solar",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["forecast-solar==4.1.0"]
"requirements": ["forecast-solar==4.2.0"]
}


@ -92,7 +92,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
available_main_ains = [
ain
for ain, dev in data.devices.items()
for ain, dev in data.devices.items() | data.templates.items()
if dev.device_and_unit_id[1] is None
]
device_reg = dr.async_get(self.hass)
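Note on the coordinator change above (a minimal standalone sketch with made-up data, not part of the diff): dict .items() views support set operations, so the union lets one comprehension collect the main AINs of both devices and templates without building an intermediate list.

devices = {"fake ain switch": object(), "fake ain cover": object()}
templates = {"fake ain template": object()}

# Union of the two item views, then keep only the keys (the AINs)
main_ains = [ain for ain, dev in devices.items() | templates.items()]
assert sorted(main_ains) == ["fake ain cover", "fake ain switch", "fake ain template"]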


@ -45,7 +45,15 @@ type FroniusConfigEntry = ConfigEntry[FroniusSolarNet]
async def async_setup_entry(hass: HomeAssistant, entry: FroniusConfigEntry) -> bool:
"""Set up fronius from a config entry."""
host = entry.data[CONF_HOST]
fronius = Fronius(async_get_clientsession(hass), host)
fronius = Fronius(
async_get_clientsession(
hass,
# Fronius Gen24 firmware 1.35.4-1 redirects to HTTPS with self-signed
# certificate. See https://github.com/home-assistant/core/issues/138881
verify_ssl=False,
),
host,
)
solar_net = FroniusSolarNet(hass, entry, fronius)
await solar_net.init_devices()
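Note on the Fronius session change above: verify_ssl=False asks Home Assistant's shared client session helper for a session that accepts the self-signed certificate the Gen24 firmware presents after its HTTPS redirect. A minimal aiohttp-only sketch of the same effect follows; the host and path are placeholders, not the integration's real endpoints.

import aiohttp

async def probe(host: str) -> int:
    # ssl=False disables certificate verification, matching verify_ssl=False above
    connector = aiohttp.TCPConnector(ssl=False)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(f"https://{host}/") as resp:
            return resp.status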


@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250507.0"]
"requirements": ["home-assistant-frontend==20250509.0"]
}


@ -8,7 +8,13 @@ from pyhap.const import CATEGORY_AIR_PURIFIER
from pyhap.service import Service
from pyhap.util import callback as pyhap_callback
from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
UnitOfTemperature,
)
from homeassistant.core import (
Event,
EventStateChangedData,
@ -43,7 +49,12 @@ from .const import (
THRESHOLD_FILTER_CHANGE_NEEDED,
)
from .type_fans import ATTR_PRESET_MODE, CHAR_ROTATION_SPEED, Fan
from .util import cleanup_name_for_homekit, convert_to_float, density_to_air_quality
from .util import (
cleanup_name_for_homekit,
convert_to_float,
density_to_air_quality,
temperature_to_homekit,
)
_LOGGER = logging.getLogger(__name__)
@ -345,8 +356,13 @@ class AirPurifier(Fan):
):
return
unit = new_state.attributes.get(
ATTR_UNIT_OF_MEASUREMENT, UnitOfTemperature.CELSIUS
)
current_temperature = temperature_to_homekit(current_temperature, unit)
_LOGGER.debug(
"%s: Linked temperature sensor %s changed to %d",
"%s: Linked temperature sensor %s changed to %d °C",
self.entity_id,
self.linked_temperature_sensor,
current_temperature,
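Note on the linked temperature sensor fix above: the sensor's value is now converted from its reported unit to Celsius before being pushed to HomeKit, which only understands Celsius. A standalone worked example of the conversion (rounding to one decimal is an assumption that matches the 60 °F → 15.6 °C expectation in the test further down):

def fahrenheit_to_celsius(value: float) -> float:
    # Standard Fahrenheit-to-Celsius conversion, rounded to one decimal place
    return round((value - 32) * 5 / 9, 1)

assert fahrenheit_to_celsius(60) == 15.6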


@ -37,5 +37,5 @@
"iot_class": "cloud_push",
"loggers": ["pylamarzocco"],
"quality_scale": "platinum",
"requirements": ["pylamarzocco==2.0.0"]
"requirements": ["pylamarzocco==2.0.1"]
}


@ -132,17 +132,18 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up sensor entities."""
coordinator = entry.runtime_data.config_coordinator
config_coordinator = entry.runtime_data.config_coordinator
statistic_coordinators = entry.runtime_data.statistics_coordinator
entities = [
LaMarzoccoSensorEntity(coordinator, description)
LaMarzoccoSensorEntity(config_coordinator, description)
for description in ENTITIES
if description.supported_fn(coordinator)
if description.supported_fn(config_coordinator)
]
entities.extend(
LaMarzoccoStatisticSensorEntity(coordinator, description)
LaMarzoccoStatisticSensorEntity(statistic_coordinators, description)
for description in STATISTIC_ENTITIES
if description.supported_fn(coordinator)
if description.supported_fn(statistic_coordinators)
)
async_add_entities(entities)


@ -6,7 +6,6 @@ import logging
from typing import Any
from pypoint import PointSession
from tempora.utc import fromtimestamp
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@ -62,7 +61,9 @@ class PointDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]
or device.device_id not in self.device_updates
or self.device_updates[device.device_id] < last_updated
):
self.device_updates[device.device_id] = last_updated or fromtimestamp(0)
self.device_updates[device.device_id] = (
last_updated or datetime.fromtimestamp(0)
)
self.data[device.device_id] = {
k: await device.sensor(k)
for k in ("temperature", "humidity", "sound_pressure")


@ -23,7 +23,7 @@ from homeassistant.helpers import (
device_registry as dr,
entity_registry as er,
)
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@ -380,6 +380,14 @@ def migrate_entity_ids(
if ch is None or is_chime:
continue # Do not consider the NVR itself or chimes
# Check for wrongfully added MAC of the NVR/Hub to the camera
# Can be removed in HA 2025.12
host_connection = (CONNECTION_NETWORK_MAC, host.api.mac_address)
if host_connection in device.connections:
new_connections = device.connections.copy()
new_connections.remove(host_connection)
device_reg.async_update_device(device.id, new_connections=new_connections)
ch_device_ids[device.id] = ch
if host.api.supported(ch, "UID") and device_uid[1] != host.api.camera_uid(ch):
if host.api.supported(None, "UID"):


@ -97,6 +97,7 @@ SKU_TO_BASE_DEVICE = {
"LAP-V102S-AASR": "Vital100S", # Alt ID Model Vital100S
"LAP-V102S-WEU": "Vital100S", # Alt ID Model Vital100S
"LAP-V102S-WUK": "Vital100S", # Alt ID Model Vital100S
"LAP-V102S-AUSR": "Vital100S", # Alt ID Model Vital100S
"EverestAir": "EverestAir",
"LAP-EL551S-AUS": "EverestAir", # Alt ID Model EverestAir
"LAP-EL551S-AEUR": "EverestAir", # Alt ID Model EverestAir


@ -88,6 +88,7 @@ from .const import (
DATA_CLIENT,
DOMAIN,
EVENT_DEVICE_ADDED_TO_REGISTRY,
RESTORE_NVM_DRIVER_READY_TIMEOUT,
USER_AGENT,
)
from .helpers import (
@ -3063,14 +3064,28 @@ async def websocket_restore_nvm(
)
)
@callback
def set_driver_ready(event: dict) -> None:
"Set the driver ready event."
wait_driver_ready.set()
wait_driver_ready = asyncio.Event()
# Set up subscription for progress events
connection.subscriptions[msg["id"]] = async_cleanup
msg[DATA_UNSUBSCRIBE] = unsubs = [
controller.on("nvm convert progress", forward_progress),
controller.on("nvm restore progress", forward_progress),
driver.once("driver ready", set_driver_ready),
]
await controller.async_restore_nvm_base64(msg["data"])
with suppress(TimeoutError):
async with asyncio.timeout(RESTORE_NVM_DRIVER_READY_TIMEOUT):
await wait_driver_ready.wait()
await hass.config_entries.async_reload(entry.entry_id)
connection.send_message(
websocket_api.event_message(
msg[ID],


@ -67,6 +67,7 @@ from .const import (
CONF_USE_ADDON,
DATA_CLIENT,
DOMAIN,
RESTORE_NVM_DRIVER_READY_TIMEOUT,
)
_LOGGER = logging.getLogger(__name__)
@ -78,7 +79,6 @@ ADDON_SETUP_TIMEOUT = 5
ADDON_SETUP_TIMEOUT_ROUNDS = 40
CONF_EMULATE_HARDWARE = "emulate_hardware"
CONF_LOG_LEVEL = "log_level"
RESTORE_NVM_DRIVER_READY_TIMEOUT = 60
SERVER_VERSION_TIMEOUT = 10
ADDON_LOG_LEVELS = {


@ -201,3 +201,7 @@ COVER_TILT_PROPERTY_KEYS: set[str | int | None] = {
WindowCoveringPropertyKey.VERTICAL_SLATS_ANGLE,
WindowCoveringPropertyKey.VERTICAL_SLATS_ANGLE_NO_POSITION,
}
# Other constants
RESTORE_NVM_DRIVER_READY_TIMEOUT = 60
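Note on the new constant and the websocket handler change above: after restoring the NVM, the handler now waits up to this timeout for the driver ready event before reloading the config entry, and continues anyway if the event never arrives. A condensed sketch of that wait pattern (names shortened from the handler above):

import asyncio
from contextlib import suppress

RESTORE_NVM_DRIVER_READY_TIMEOUT = 60

async def wait_then_reload(wait_driver_ready: asyncio.Event) -> None:
    # Give the driver up to the timeout to signal readiness, then move on regardless
    with suppress(TimeoutError):
        async with asyncio.timeout(RESTORE_NVM_DRIVER_READY_TIMEOUT):
            await wait_driver_ready.wait()
    # the config entry reload proceeds here whether or not the driver signalled ready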


@ -1204,7 +1204,7 @@ DISCOVERY_SCHEMAS = [
property={RESET_METER_PROPERTY},
type={ValueType.BOOLEAN},
),
entity_category=EntityCategory.DIAGNOSTIC,
entity_category=EntityCategory.CONFIG,
),
ZWaveDiscoverySchema(
platform=Platform.BINARY_SENSOR,


@ -25,7 +25,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2025
MINOR_VERSION: Final = 5
PATCH_VERSION: Final = "0"
PATCH_VERSION: Final = "1"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)


@ -575,9 +575,11 @@ class DeviceRegistryItems[_EntryTypeT: (DeviceEntry, DeletedDeviceEntry)](
"""Unindex an entry."""
old_entry = self.data[key]
for connection in old_entry.connections:
del self._connections[connection]
if connection in self._connections:
del self._connections[connection]
for identifier in old_entry.identifiers:
del self._identifiers[identifier]
if identifier in self._identifiers:
del self._identifiers[identifier]
def get_entry(
self,
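Note on the registry change above: unindexing is now tolerant of a connection or identifier that is no longer present in the index, so removing an entry no longer raises KeyError when an index key was already dropped. A minimal standalone sketch of the guarded-delete pattern, using hypothetical index data:

index: dict[tuple[str, str], str] = {("mac", "aa:bb:cc"): "device_1"}

def unindex_connection(connection: tuple[str, str]) -> None:
    # Only delete if the key is still indexed, mirroring the membership check above
    if connection in index:
        del index[connection]

unindex_connection(("mac", "aa:bb:cc"))
unindex_connection(("mac", "aa:bb:cc"))  # second call is a no-op instead of raising KeyError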


@ -2,7 +2,7 @@
aiodhcpwatcher==1.1.1
aiodiscover==2.6.1
aiodns==3.3.0
aiodns==3.4.0
aiohasupervisor==0.3.1
aiohttp-asyncmdnsresolver==0.1.1
aiohttp-fast-zlib==0.2.3
@ -38,7 +38,7 @@ habluetooth==3.48.2
hass-nabucasa==0.96.0
hassil==2.2.3
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20250507.0
home-assistant-frontend==20250509.0
home-assistant-intents==2025.5.7
httpx==0.28.1
ifaddr==0.2.0


@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2025.5.0"
version = "2025.5.1"
license = "Apache-2.0"
license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
description = "Open-source home automation platform running on Python 3."
@ -23,7 +23,7 @@ classifiers = [
]
requires-python = ">=3.13.2"
dependencies = [
"aiodns==3.3.0",
"aiodns==3.4.0",
# Integrations may depend on hassio integration without listing it to
# change behavior based on presence of supervisor. Deprecated with #127228
# Lib can be removed with 2025.11

requirements.txt (generated)

@ -3,7 +3,7 @@
-c homeassistant/package_constraints.txt
# Home Assistant Core
aiodns==3.3.0
aiodns==3.4.0
aiohasupervisor==0.3.1
aiohttp==3.11.18
aiohttp_cors==0.7.0

requirements_all.txt (generated)

@ -223,7 +223,7 @@ aiodhcpwatcher==1.1.1
aiodiscover==2.6.1
# homeassistant.components.dnsip
aiodns==3.3.0
aiodns==3.4.0
# homeassistant.components.duke_energy
aiodukeenergy==0.3.0
@ -958,7 +958,7 @@ fnv-hash-fast==1.5.0
foobot_async==1.0.0
# homeassistant.components.forecast_solar
forecast-solar==4.1.0
forecast-solar==4.2.0
# homeassistant.components.fortios
fortiosapi==1.0.5
@ -1161,7 +1161,7 @@ hole==0.8.0
holidays==0.70
# homeassistant.components.frontend
home-assistant-frontend==20250507.0
home-assistant-frontend==20250509.0
# homeassistant.components.conversation
home-assistant-intents==2025.5.7
@ -2093,7 +2093,7 @@ pykwb==0.0.8
pylacrosse==0.4
# homeassistant.components.lamarzocco
pylamarzocco==2.0.0
pylamarzocco==2.0.1
# homeassistant.components.lastfm
pylast==5.1.0


@ -211,7 +211,7 @@ aiodhcpwatcher==1.1.1
aiodiscover==2.6.1
# homeassistant.components.dnsip
aiodns==3.3.0
aiodns==3.4.0
# homeassistant.components.duke_energy
aiodukeenergy==0.3.0
@ -818,7 +818,7 @@ fnv-hash-fast==1.5.0
foobot_async==1.0.0
# homeassistant.components.forecast_solar
forecast-solar==4.1.0
forecast-solar==4.2.0
# homeassistant.components.freebox
freebox-api==1.2.2
@ -991,7 +991,7 @@ hole==0.8.0
holidays==0.70
# homeassistant.components.frontend
home-assistant-frontend==20250507.0
home-assistant-frontend==20250509.0
# homeassistant.components.conversation
home-assistant-intents==2025.5.7
@ -1708,7 +1708,7 @@ pykrakenapi==0.1.8
pykulersky==0.5.8
# homeassistant.components.lamarzocco
pylamarzocco==2.0.0
pylamarzocco==2.0.1
# homeassistant.components.lastfm
pylast==5.1.0


@ -177,7 +177,7 @@ async def _test_downloading_encrypted_backup(
enc_metadata = json.loads(outer_tar.extractfile("./backup.json").read())
assert enc_metadata["protected"] is True
with (
outer_tar.extractfile("core.tar.gz") as inner_tar_file,
outer_tar.extractfile("homeassistant.tar.gz") as inner_tar_file,
pytest.raises(tarfile.ReadError, match="file could not be opened"),
):
# pylint: disable-next=consider-using-with
@ -209,7 +209,7 @@ async def _test_downloading_encrypted_backup(
dec_metadata = json.loads(outer_tar.extractfile("./backup.json").read())
assert dec_metadata == enc_metadata | {"protected": False}
with (
outer_tar.extractfile("core.tar.gz") as inner_tar_file,
outer_tar.extractfile("homeassistant.tar.gz") as inner_tar_file,
tarfile.open(fileobj=inner_tar_file, mode="r") as inner_tar,
):
assert inner_tar.getnames() == [


@ -174,7 +174,10 @@ async def test_decrypted_backup_streamer(hass: HomeAssistant) -> None:
)
encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@ -218,7 +221,10 @@ async def test_decrypted_backup_streamer_interrupt_stuck_reader(
"""Test the decrypted backup streamer."""
encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@ -253,7 +259,10 @@ async def test_decrypted_backup_streamer_interrupt_stuck_writer(
"""Test the decrypted backup streamer."""
encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@ -283,7 +292,10 @@ async def test_decrypted_backup_streamer_wrong_password(hass: HomeAssistant) ->
"""Test the decrypted backup streamer with wrong password."""
encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@ -320,7 +332,10 @@ async def test_encrypted_backup_streamer(hass: HomeAssistant) -> None:
)
encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@ -353,15 +368,16 @@ async def test_encrypted_backup_streamer(hass: HomeAssistant) -> None:
bytes.fromhex("00000000000000000000000000000000"),
)
encryptor = EncryptedBackupStreamer(hass, backup, open_backup, "hunter2")
assert encryptor.backup() == dataclasses.replace(
backup, protected=True, size=backup.size + len(expected_padding)
)
encrypted_stream = await encryptor.open_stream()
encrypted_output = b""
async for chunk in encrypted_stream:
encrypted_output += chunk
await encryptor.wait()
assert encryptor.backup() == dataclasses.replace(
backup, protected=True, size=backup.size + len(expected_padding)
)
encrypted_stream = await encryptor.open_stream()
encrypted_output = b""
async for chunk in encrypted_stream:
encrypted_output += chunk
await encryptor.wait()
# Expect the output to match the stored encrypted backup file, with additional
# padding.
@ -377,7 +393,10 @@ async def test_encrypted_backup_streamer_interrupt_stuck_reader(
"test_backups/c0cb53bd.tar.decrypted", DOMAIN
)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@ -414,7 +433,10 @@ async def test_encrypted_backup_streamer_interrupt_stuck_writer(
"test_backups/c0cb53bd.tar.decrypted", DOMAIN
)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@ -447,7 +469,10 @@ async def test_encrypted_backup_streamer_random_nonce(hass: HomeAssistant) -> No
)
encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@ -490,7 +515,7 @@ async def test_encrypted_backup_streamer_random_nonce(hass: HomeAssistant) -> No
await encryptor1.wait()
await encryptor2.wait()
# Output from the two streames should differ but have the same length.
# Output from the two streams should differ but have the same length.
assert encrypted_output1 != encrypted_output3
assert len(encrypted_output1) == len(encrypted_output3)
@ -508,7 +533,10 @@ async def test_encrypted_backup_streamer_error(hass: HomeAssistant) -> None:
"test_backups/c0cb53bd.tar.decrypted", DOMAIN
)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,


@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.util.dt import utcnow
from . import FritzDeviceCoverMock, FritzDeviceSwitchMock
from . import FritzDeviceCoverMock, FritzDeviceSwitchMock, FritzEntityBaseMock
from .const import MOCK_CONFIG
from tests.common import MockConfigEntry, async_fire_time_changed
@ -84,6 +84,8 @@ async def test_coordinator_automatic_registry_cleanup(
entity_registry: er.EntityRegistry,
) -> None:
"""Test automatic registry cleanup."""
# init with 2 devices and 1 template
fritz().get_devices.return_value = [
FritzDeviceSwitchMock(
ain="fake ain switch",
@ -96,6 +98,13 @@ async def test_coordinator_automatic_registry_cleanup(
name="fake_cover",
),
]
fritz().get_templates.return_value = [
FritzEntityBaseMock(
ain="fake ain template",
device_and_unit_id=("fake ain template", None),
name="fake_template",
)
]
entry = MockConfigEntry(
domain=FB_DOMAIN,
data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0],
@ -105,9 +114,10 @@ async def test_coordinator_automatic_registry_cleanup(
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done(wait_background_tasks=True)
assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 19
assert len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) == 2
assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 20
assert len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) == 3
# remove one device, keep the template
fritz().get_devices.return_value = [
FritzDeviceSwitchMock(
ain="fake ain switch",
@ -119,5 +129,14 @@ async def test_coordinator_automatic_registry_cleanup(
async_fire_time_changed(hass, utcnow() + timedelta(seconds=35))
await hass.async_block_till_done(wait_background_tasks=True)
assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 13
assert len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) == 2
# remove the template, keep the device
fritz().get_templates.return_value = []
async_fire_time_changed(hass, utcnow() + timedelta(seconds=35))
await hass.async_block_till_done(wait_background_tasks=True)
assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 12
assert len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) == 1


@ -34,9 +34,11 @@ from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
ATTR_UNIT_OF_MEASUREMENT,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
UnitOfTemperature,
)
from homeassistant.core import Event, HomeAssistant
@ -437,6 +439,22 @@ async def test_expose_linked_sensors(
assert acc.char_air_quality.value == 1
assert len(broker.mock_calls) == 0
# Updated temperature with different unit should reflect in HomeKit
broker = MagicMock()
acc.char_current_temperature.broker = broker
hass.states.async_set(
temperature_entity_id,
60,
{
ATTR_DEVICE_CLASS: SensorDeviceClass.TEMPERATURE,
ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.FAHRENHEIT,
},
)
await hass.async_block_till_done()
assert acc.char_current_temperature.value == 15.6
assert len(broker.mock_calls) == 2
broker.reset_mock()
# Updated temperature should reflect in HomeKit
broker = MagicMock()
acc.char_current_temperature.broker = broker


@ -39,7 +39,7 @@ from homeassistant.helpers import (
entity_registry as er,
issue_registry as ir,
)
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
from homeassistant.setup import async_setup_component
from .conftest import (
@ -51,6 +51,7 @@ from .conftest import (
TEST_HOST,
TEST_HOST_MODEL,
TEST_MAC,
TEST_MAC_CAM,
TEST_NVR_NAME,
TEST_PORT,
TEST_PRIVACY,
@ -614,6 +615,55 @@ async def test_migrate_with_already_existing_entity(
assert entity_registry.async_get_entity_id(domain, DOMAIN, new_id)
async def test_cleanup_mac_connection(
hass: HomeAssistant,
config_entry: MockConfigEntry,
reolink_connect: MagicMock,
entity_registry: er.EntityRegistry,
device_registry: dr.DeviceRegistry,
) -> None:
"""Test cleanup of the MAC of a IPC which was set to the MAC of the host."""
reolink_connect.channels = [0]
reolink_connect.baichuan.mac_address.return_value = None
entity_id = f"{TEST_UID}_{TEST_UID_CAM}_record_audio"
dev_id = f"{TEST_UID}_{TEST_UID_CAM}"
domain = Platform.SWITCH
dev_entry = device_registry.async_get_or_create(
identifiers={(DOMAIN, dev_id)},
connections={(CONNECTION_NETWORK_MAC, TEST_MAC)},
config_entry_id=config_entry.entry_id,
disabled_by=None,
)
entity_registry.async_get_or_create(
domain=domain,
platform=DOMAIN,
unique_id=entity_id,
config_entry=config_entry,
suggested_object_id=entity_id,
disabled_by=None,
device_id=dev_entry.id,
)
assert entity_registry.async_get_entity_id(domain, DOMAIN, entity_id)
device = device_registry.async_get_device(identifiers={(DOMAIN, dev_id)})
assert device
assert device.connections == {(CONNECTION_NETWORK_MAC, TEST_MAC)}
# setup CH 0 and host entities/device
with patch("homeassistant.components.reolink.PLATFORMS", [domain]):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert entity_registry.async_get_entity_id(domain, DOMAIN, entity_id)
device = device_registry.async_get_device(identifiers={(DOMAIN, dev_id)})
assert device
assert device.connections == set()
reolink_connect.baichuan.mac_address.return_value = TEST_MAC_CAM
async def test_no_repair_issue(
hass: HomeAssistant, config_entry: MockConfigEntry, issue_registry: ir.IssueRegistry
) -> None:


@ -5518,10 +5518,98 @@ async def test_restore_nvm(
# Set up mocks for the controller events
controller = client.driver.controller
# Test restore success
with patch.object(
controller, "async_restore_nvm_base64", return_value=None
) as mock_restore:
async def async_send_command_driver_ready(
message: dict[str, Any],
require_schema: int | None = None,
) -> dict:
"""Send a command and get a response."""
client.driver.emit(
"driver ready", {"event": "driver ready", "source": "driver"}
)
return {}
client.async_send_command.side_effect = async_send_command_driver_ready
# Send the subscription request
await ws_client.send_json_auto_id(
{
"type": "zwave_js/restore_nvm",
"entry_id": integration.entry_id,
"data": "dGVzdA==", # base64 encoded "test"
}
)
# Verify the finished event first
msg = await ws_client.receive_json()
assert msg["type"] == "event"
assert msg["event"]["event"] == "finished"
# Verify subscription success
msg = await ws_client.receive_json()
assert msg["type"] == "result"
assert msg["success"] is True
# Simulate progress events
event = Event(
"nvm restore progress",
{
"source": "controller",
"event": "nvm restore progress",
"bytesWritten": 25,
"total": 100,
},
)
controller.receive_event(event)
msg = await ws_client.receive_json()
assert msg["event"]["event"] == "nvm restore progress"
assert msg["event"]["bytesWritten"] == 25
assert msg["event"]["total"] == 100
event = Event(
"nvm restore progress",
{
"source": "controller",
"event": "nvm restore progress",
"bytesWritten": 50,
"total": 100,
},
)
controller.receive_event(event)
msg = await ws_client.receive_json()
assert msg["event"]["event"] == "nvm restore progress"
assert msg["event"]["bytesWritten"] == 50
assert msg["event"]["total"] == 100
await hass.async_block_till_done()
# Verify the restore was called
# The first call is the relevant one for nvm restore.
assert client.async_send_command.call_count == 3
assert client.async_send_command.call_args_list[0] == call(
{
"command": "controller.restore_nvm",
"nvmData": "dGVzdA==",
},
require_schema=14,
)
client.async_send_command.reset_mock()
# Test sending command with driver not ready and timeout.
async def async_send_command_no_driver_ready(
message: dict[str, Any],
require_schema: int | None = None,
) -> dict:
"""Send a command and get a response."""
return {}
client.async_send_command.side_effect = async_send_command_no_driver_ready
with patch(
"homeassistant.components.zwave_js.api.RESTORE_NVM_DRIVER_READY_TIMEOUT",
new=0,
):
# Send the subscription request
await ws_client.send_json_auto_id(
{
@ -5533,6 +5621,7 @@ async def test_restore_nvm(
# Verify the finished event first
msg = await ws_client.receive_json()
assert msg["type"] == "event"
assert msg["event"]["event"] == "finished"
@ -5541,48 +5630,25 @@ async def test_restore_nvm(
assert msg["type"] == "result"
assert msg["success"] is True
# Simulate progress events
event = Event(
"nvm restore progress",
{
"source": "controller",
"event": "nvm restore progress",
"bytesWritten": 25,
"total": 100,
},
)
controller.receive_event(event)
msg = await ws_client.receive_json()
assert msg["event"]["event"] == "nvm restore progress"
assert msg["event"]["bytesWritten"] == 25
assert msg["event"]["total"] == 100
event = Event(
"nvm restore progress",
{
"source": "controller",
"event": "nvm restore progress",
"bytesWritten": 50,
"total": 100,
},
)
controller.receive_event(event)
msg = await ws_client.receive_json()
assert msg["event"]["event"] == "nvm restore progress"
assert msg["event"]["bytesWritten"] == 50
assert msg["event"]["total"] == 100
# Wait for the restore to complete
await hass.async_block_till_done()
# Verify the restore was called
assert mock_restore.called
# Verify the restore was called
# The first call is the relevant one for nvm restore.
assert client.async_send_command.call_count == 3
assert client.async_send_command.call_args_list[0] == call(
{
"command": "controller.restore_nvm",
"nvmData": "dGVzdA==",
},
require_schema=14,
)
client.async_send_command.reset_mock()
# Test restore failure
with patch.object(
controller,
"async_restore_nvm_base64",
side_effect=FailedCommand("failed_command", "Restore failed"),
with patch(
f"{CONTROLLER_PATCH_PREFIX}.async_restore_nvm_base64",
side_effect=FailedZWaveCommand("failed_command", 1, "error message"),
):
# Send the subscription request
await ws_client.send_json_auto_id(
@ -5596,7 +5662,7 @@ async def test_restore_nvm(
# Verify error response
msg = await ws_client.receive_json()
assert not msg["success"]
assert msg["error"]["code"] == "Restore failed"
assert msg["error"]["code"] == "zwave_error"
# Test entry_id not found
await ws_client.send_json_auto_id(


@ -431,10 +431,11 @@ async def test_rediscovery(
async def test_aeotec_smart_switch_7(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
aeotec_smart_switch_7: Node,
integration: MockConfigEntry,
) -> None:
"""Test that Smart Switch 7 has a light and a switch entity."""
"""Test Smart Switch 7 discovery."""
state = hass.states.get("light.smart_switch_7")
assert state
assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [
@ -443,3 +444,9 @@ async def test_aeotec_smart_switch_7(
state = hass.states.get("switch.smart_switch_7")
assert state
state = hass.states.get("button.smart_switch_7_reset_accumulated_values")
assert state
entity_entry = entity_registry.async_get(state.entity_id)
assert entity_entry
assert entity_entry.entity_category is EntityCategory.CONFIG