Franck Nijhof 2025-05-23 17:09:32 +02:00 committed by GitHub
commit 3e6473d130
62 changed files with 1082 additions and 389 deletions

View File

@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["pyaprilaire"],
"requirements": ["pyaprilaire==0.8.1"]
"requirements": ["pyaprilaire==0.9.0"]
}

View File

@ -2,8 +2,8 @@
from aiohttp import ClientTimeout
from azure.core.exceptions import (
AzureError,
ClientAuthenticationError,
HttpResponseError,
ResourceNotFoundError,
)
from azure.core.pipeline.transport._aiohttp import (
@ -70,7 +70,7 @@ async def async_setup_entry(
translation_key="invalid_auth",
translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
) from err
except HttpResponseError as err:
except AzureError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",

View File

@ -8,7 +8,7 @@ import json
import logging
from typing import Any, Concatenate
from azure.core.exceptions import HttpResponseError
from azure.core.exceptions import AzureError, HttpResponseError, ServiceRequestError
from azure.storage.blob import BlobProperties
from homeassistant.components.backup import (
@ -80,6 +80,20 @@ def handle_backup_errors[_R, **P](
f"Error during backup operation in {func.__name__}:"
f" Status {err.status_code}, message: {err.message}"
) from err
except ServiceRequestError as err:
raise BackupAgentError(
f"Timeout during backup operation in {func.__name__}"
) from err
except AzureError as err:
_LOGGER.debug(
"Error during backup in %s: %s",
func.__name__,
err,
exc_info=True,
)
raise BackupAgentError(
f"Error during backup operation in {func.__name__}: {err}"
) from err
return wrapper
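The ordering of these except clauses is what makes the hunk work: in azure-core, HttpResponseError and ServiceRequestError are both subclasses of AzureError, so the broad AzureError handler has to come last or it would swallow the more specific cases. A minimal standalone sketch of the same pattern, with BackupError standing in for Home Assistant's BackupAgentError:

import functools

from azure.core.exceptions import AzureError, HttpResponseError, ServiceRequestError


class BackupError(Exception):
    """Stand-in for Home Assistant's BackupAgentError."""


def handle_errors(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except HttpResponseError as err:
            # A response was received, but with an error status.
            raise BackupError(f"Status {err.status_code}, message: {err.message}") from err
        except ServiceRequestError as err:
            # The request never produced a response (timeout, connection failure).
            raise BackupError(f"Timeout in {func.__name__}") from err
        except AzureError as err:
            # Anything else raised by the SDK; must be the last clause.
            raise BackupError(f"Error in {func.__name__}: {err}") from err
    return wrapper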

View File

@ -8,5 +8,5 @@
"iot_class": "local_polling",
"loggers": ["aiocomelit"],
"quality_scale": "bronze",
"requirements": ["aiocomelit==0.12.1"]
"requirements": ["aiocomelit==0.12.3"]
}

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.10", "deebot-client==13.2.0"]
"requirements": ["py-sucks==0.9.10", "deebot-client==13.2.1"]
}

View File

@ -78,7 +78,9 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
capability_fn=lambda caps: caps.stats.clean,
value_fn=lambda e: e.area,
translation_key="stats_area",
device_class=SensorDeviceClass.AREA,
native_unit_of_measurement_fn=get_area_native_unit_of_measurement,
suggested_unit_of_measurement=UnitOfArea.SQUARE_METERS,
),
EcovacsSensorEntityDescription[StatsEvent](
key="stats_time",
@ -95,7 +97,8 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
value_fn=lambda e: e.area,
key="total_stats_area",
translation_key="total_stats_area",
native_unit_of_measurement_fn=get_area_native_unit_of_measurement,
device_class=SensorDeviceClass.AREA,
native_unit_of_measurement=UnitOfArea.SQUARE_METERS,
state_class=SensorStateClass.TOTAL_INCREASING,
),
EcovacsSensorEntityDescription[TotalStatsEvent](

View File

@ -6,5 +6,5 @@
"iot_class": "local_push",
"loggers": ["sense_energy"],
"quality_scale": "internal",
"requirements": ["sense-energy==0.13.7"]
"requirements": ["sense-energy==0.13.8"]
}

View File

@ -17,7 +17,7 @@ DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS = False
DEFAULT_PORT: Final = 6053
STABLE_BLE_VERSION_STR = "2025.2.2"
STABLE_BLE_VERSION_STR = "2025.5.0"
STABLE_BLE_VERSION = AwesomeVersion(STABLE_BLE_VERSION_STR)
PROJECT_URLS = {
"esphome.bluetooth-proxy": "https://esphome.github.io/bluetooth-proxies/",

View File

@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["pyfibaro"],
"requirements": ["pyfibaro==0.8.2"]
"requirements": ["pyfibaro==0.8.3"]
}

View File

@ -254,11 +254,11 @@ async def google_generative_ai_config_option_schema(
)
for api_model in sorted(api_models, key=lambda x: x.display_name or "")
if (
api_model.name != "models/gemini-1.0-pro" # duplicate of gemini-pro
and api_model.display_name
api_model.display_name
and api_model.name
and api_model.supported_actions
and "tts" not in api_model.name
and "vision" not in api_model.name
and api_model.supported_actions
and "generateContent" in api_model.supported_actions
)
]

View File

@ -319,11 +319,10 @@ class GoogleGenerativeAIConversationEntity(
tools.append(Tool(google_search=GoogleSearch()))
model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
# Gemini 1.0 doesn't support system_instruction while 1.5 does.
# Assume future versions will support it (if not, the request fails with a
# clear message at which point we can fix).
# Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
supports_system_instruction = (
"gemini-1.0" not in model_name and "gemini-pro" not in model_name
"gemma" not in model_name
and "gemini-2.0-flash-preview-image-generation" not in model_name
)
prompt_content = cast(

View File

@ -41,12 +41,12 @@
},
"data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template.",
"enable_google_search_tool": "Only works with \"No control\" in the \"Control Home Assistant\" setting. See docs for a workaround using it with \"Assist\"."
"enable_google_search_tool": "Only works if there is nothing selected in the \"Control Home Assistant\" setting. See docs for a workaround using it with \"Assist\"."
}
}
},
"error": {
"invalid_google_search_option": "Google Search cannot be enabled alongside any Assist capability, this can only be used when Assist is set to \"No control\"."
"invalid_google_search_option": "Google Search can only be enabled if nothing is selected in the \"Control Home Assistant\" setting."
}
},
"services": {

View File

@ -60,6 +60,9 @@ class HistoryStats:
self._start = start
self._end = end
self._pending_events: list[Event[EventStateChangedData]] = []
self._query_count = 0
async def async_update(
self, event: Event[EventStateChangedData] | None
) -> HistoryStatsState:
@ -85,6 +88,14 @@ class HistoryStats:
utc_now = dt_util.utcnow()
now_timestamp = floored_timestamp(utc_now)
# If we end up querying data from the recorder when we get triggered by a new state
# change event, it is possible this function could be reentered a second time before
# the first recorder query returns. In that case a second recorder query will be done
# and we need to hold the new event so that we can append it after the second query.
# Otherwise the event will be dropped.
if event:
self._pending_events.append(event)
if current_period_start_timestamp > now_timestamp:
# History cannot tell the future
self._history_current_period = []
@ -113,15 +124,14 @@ class HistoryStats:
start_changed = (
current_period_start_timestamp != previous_period_start_timestamp
)
end_changed = current_period_end_timestamp != previous_period_end_timestamp
if start_changed:
self._prune_history_cache(current_period_start_timestamp)
new_data = False
if event and (new_state := event.data["new_state"]) is not None:
if (
current_period_start_timestamp
<= floored_timestamp(new_state.last_changed)
<= current_period_end_timestamp
if current_period_start_timestamp <= floored_timestamp(
new_state.last_changed
):
self._history_current_period.append(
HistoryState(new_state.state, new_state.last_changed_timestamp)
@ -131,26 +141,31 @@ class HistoryStats:
not new_data
and current_period_end_timestamp < now_timestamp
and not start_changed
and not end_changed
):
# If period has not changed and current time after the period end...
# Don't compute anything as the value cannot have changed
return self._state
else:
await self._async_history_from_db(
current_period_start_timestamp, current_period_end_timestamp
current_period_start_timestamp, now_timestamp
)
if event and (new_state := event.data["new_state"]) is not None:
if (
current_period_start_timestamp
<= floored_timestamp(new_state.last_changed)
<= current_period_end_timestamp
for pending_event in self._pending_events:
if (new_state := pending_event.data["new_state"]) is not None:
if current_period_start_timestamp <= floored_timestamp(
new_state.last_changed
):
self._history_current_period.append(
HistoryState(new_state.state, new_state.last_changed_timestamp)
HistoryState(
new_state.state, new_state.last_changed_timestamp
)
)
self._has_recorder_data = True
if self._query_count == 0:
self._pending_events.clear()
seconds_matched, match_count = self._async_compute_seconds_and_changes(
now_timestamp,
current_period_start_timestamp,
@ -165,12 +180,16 @@ class HistoryStats:
current_period_end_timestamp: float,
) -> None:
"""Update history data for the current period from the database."""
self._query_count += 1
try:
instance = get_instance(self.hass)
states = await instance.async_add_executor_job(
self._state_changes_during_period,
current_period_start_timestamp,
current_period_end_timestamp,
)
finally:
self._query_count -= 1
self._history_current_period = [
HistoryState(state.state, state.last_changed.timestamp())
for state in states
@ -208,6 +227,9 @@ class HistoryStats:
current_state_matches = history_state.state in self._entity_states
state_change_timestamp = history_state.last_changed
if math.floor(state_change_timestamp) > end_timestamp:
break
if math.floor(state_change_timestamp) > now_timestamp:
# Shouldn't count states that are in the future
_LOGGER.debug(
@ -215,7 +237,7 @@ class HistoryStats:
state_change_timestamp,
now_timestamp,
)
continue
break
if previous_state_matches:
elapsed += state_change_timestamp - last_state_change_timestamp
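The comment in this hunk describes the whole idea: buffer state-change events that arrive while a recorder query is in flight, and only clear the buffer once no query is outstanding, so a reentrant update cannot drop an event. A stripped-down sketch of that guard, using hypothetical names rather than the integration's real attributes:

class PendingEventBuffer:
    """Buffer events while a recorder-style query is in flight (sketch, hypothetical names)."""

    def __init__(self) -> None:
        self._pending: list[str] = []
        self._query_count = 0

    async def update(self, event: str | None, fetch) -> list[str]:
        if event:
            # Hold the event so a reentrant call cannot drop it.
            self._pending.append(event)
        self._query_count += 1
        try:
            history = await fetch()  # this await is where a second update can re-enter
        finally:
            self._query_count -= 1
        # Replay everything that arrived while the query was running.
        history.extend(self._pending)
        if self._query_count == 0:
            # Clear only once no other query is still outstanding.
            self._pending.clear()
        return history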

View File

@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
"requirements": ["holidays==0.72", "babel==2.15.0"]
"requirements": ["holidays==0.73", "babel==2.15.0"]
}

View File

@ -5,7 +5,7 @@ from dataclasses import dataclass
from typing import cast
from pylamarzocco import LaMarzoccoMachine
from pylamarzocco.const import BackFlushStatus, MachineState, WidgetType
from pylamarzocco.const import BackFlushStatus, MachineState, ModelName, WidgetType
from pylamarzocco.models import BackFlush, MachineStatus
from homeassistant.components.binary_sensor import (
@ -66,6 +66,9 @@ ENTITIES: tuple[LaMarzoccoBinarySensorEntityDescription, ...] = (
is BackFlushStatus.REQUESTED
),
entity_category=EntityCategory.DIAGNOSTIC,
supported_fn=lambda coordinator: (
coordinator.device.dashboard.model_name != ModelName.GS3_MP
),
),
LaMarzoccoBinarySensorEntityDescription(
key="websocket_connected",

View File

@ -37,5 +37,5 @@
"iot_class": "cloud_push",
"loggers": ["pylamarzocco"],
"quality_scale": "platinum",
"requirements": ["pylamarzocco==2.0.3"]
"requirements": ["pylamarzocco==2.0.4"]
}

View File

@ -4,7 +4,7 @@ import asyncio
from dataclasses import dataclass
from typing import Any
from pylamarzocco.const import FirmwareType, UpdateCommandStatus
from pylamarzocco.const import FirmwareType, UpdateStatus
from pylamarzocco.exceptions import RequestNotSuccessful
from homeassistant.components.update import (
@ -125,7 +125,7 @@ class LaMarzoccoUpdateEntity(LaMarzoccoEntity, UpdateEntity):
await self.coordinator.device.update_firmware()
while (
update_progress := await self.coordinator.device.get_firmware()
).command_status is UpdateCommandStatus.IN_PROGRESS:
).command_status is UpdateStatus.IN_PROGRESS:
if counter >= MAX_UPDATE_WAIT:
_raise_timeout_error()
self._attr_update_percentage = update_progress.progress_percentage

View File

@ -9,7 +9,11 @@
"condition_type": {
"is_locked": "{entity_name} is locked",
"is_unlocked": "{entity_name} is unlocked",
"is_open": "{entity_name} is open"
"is_open": "{entity_name} is open",
"is_jammed": "{entity_name} is jammed",
"is_locking": "{entity_name} is locking",
"is_unlocking": "{entity_name} is unlocking",
"is_opening": "{entity_name} is opening"
},
"trigger_type": {
"locked": "{entity_name} locked",

View File

@ -475,7 +475,7 @@ class MatrixBot:
file_stat = await aiofiles.os.stat(image_path)
_LOGGER.debug("Uploading file from path, %s", image_path)
async with aiofiles.open(image_path, "r+b") as image_file:
async with aiofiles.open(image_path, "rb") as image_file:
response, _ = await self._client.upload(
image_file,
content_type=mime_type,

View File

@ -8,6 +8,6 @@
"iot_class": "calculated",
"loggers": ["yt_dlp"],
"quality_scale": "internal",
"requirements": ["yt-dlp[default]==2025.03.31"],
"requirements": ["yt-dlp[default]==2025.05.22"],
"single_config_entry": true
}

View File

@ -248,7 +248,9 @@ class NetatmoThermostat(NetatmoRoomEntity, ClimateEntity):
if self.home.entity_id != data["home_id"]:
return
if data["event_type"] == EVENT_TYPE_SCHEDULE and "schedule_id" in data:
if data["event_type"] == EVENT_TYPE_SCHEDULE:
# handle schedule change
if "schedule_id" in data:
self._selected_schedule = getattr(
self.hass.data[DOMAIN][DATA_SCHEDULES][self.home.entity_id].get(
data["schedule_id"]
@ -261,6 +263,7 @@ class NetatmoThermostat(NetatmoRoomEntity, ClimateEntity):
)
self.async_write_ha_state()
self.data_handler.async_force_update(self._signal_name)
# ignore other schedule events
return
home = data["home"]

View File

@ -7,5 +7,5 @@
"iot_class": "cloud_push",
"loggers": ["aionfty"],
"quality_scale": "bronze",
"requirements": ["aiontfy==0.5.1"]
"requirements": ["aiontfy==0.5.2"]
}

View File

@ -1,6 +1,7 @@
{
"domain": "onedrive",
"name": "OneDrive",
"after_dependencies": ["cloud"],
"codeowners": ["@zweckj"],
"config_flow": true,
"dependencies": ["application_credentials"],

View File

@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/opower",
"iot_class": "cloud_polling",
"loggers": ["opower"],
"requirements": ["opower==0.12.0"]
"requirements": ["opower==0.12.1"]
}

View File

@ -19,9 +19,7 @@ from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon
MultiprotocolAddonManager,
get_multiprotocol_addon_manager,
is_multiprotocol_url,
multi_pan_addon_using_device,
)
from homeassistant.components.homeassistant_yellow import RADIO_DEVICE as YELLOW_RADIO
from homeassistant.config_entries import SOURCE_USER
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
@ -34,10 +32,6 @@ if TYPE_CHECKING:
_LOGGER = logging.getLogger(__name__)
INFO_URL_SKY_CONNECT = (
"https://skyconnect.home-assistant.io/multiprotocol-channel-missmatch"
)
INFO_URL_YELLOW = "https://yellow.home-assistant.io/multiprotocol-channel-missmatch"
INSECURE_NETWORK_KEYS = (
# Thread web UI default
@ -208,16 +202,12 @@ async def _warn_on_channel_collision(
delete_issue()
return
yellow = await multi_pan_addon_using_device(hass, YELLOW_RADIO)
learn_more_url = INFO_URL_YELLOW if yellow else INFO_URL_SKY_CONNECT
ir.async_create_issue(
hass,
DOMAIN,
f"otbr_zha_channel_collision_{otbrdata.entry_id}",
is_fixable=False,
is_persistent=False,
learn_more_url=learn_more_url,
severity=ir.IssueSeverity.WARNING,
translation_key="otbr_zha_channel_collision",
translation_placeholders={

View File

@ -6,6 +6,7 @@ from contextlib import contextmanager, nullcontext
from datetime import timedelta
import logging
from typing import Any
import warnings
from qnapstats import QNAPStats
import urllib3
@ -37,7 +38,8 @@ def suppress_insecure_request_warning():
Was added in here to solve the following issue, not being solved upstream.
https://github.com/colinodell/python-qnapstats/issues/96
"""
with urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning):
with warnings.catch_warnings():
warnings.simplefilter("ignore", urllib3.exceptions.InsecureRequestWarning)
yield
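The replaced line relied on urllib3.disable_warnings(), which simply applies warnings.simplefilter("ignore", ...) to the global filter and returns None, so it is not a context manager and the suppression was not scoped. warnings.catch_warnings() saves the filter state and restores it when the block exits. A small standalone illustration of the scoped pattern:

import warnings

import urllib3


def fetch_status_insecure(url: str) -> int:
    """Fetch a URL with TLS verification off, without the warning leaking out."""
    http = urllib3.PoolManager(cert_reqs="CERT_NONE")
    with warnings.catch_warnings():
        # Ignored only inside this block; the previous filter is restored on exit.
        warnings.simplefilter("ignore", urllib3.exceptions.InsecureRequestWarning)
        return http.request("GET", url).status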

View File

@ -364,10 +364,7 @@ def migrate_entity_ids(
devices = dr.async_entries_for_config_entry(device_reg, config_entry_id)
ch_device_ids = {}
for device in devices:
for dev_id in device.identifiers:
(device_uid, ch, is_chime) = get_device_uid_and_ch(dev_id, host)
if not device_uid:
continue
(device_uid, ch, is_chime) = get_device_uid_and_ch(device, host)
if host.api.supported(None, "UID") and device_uid[0] != host.unique_id:
if ch is None:
@ -380,30 +377,37 @@ def migrate_entity_ids(
new_device_id,
)
new_identifiers = {(DOMAIN, new_device_id)}
device_reg.async_update_device(
device.id, new_identifiers=new_identifiers
)
device_reg.async_update_device(device.id, new_identifiers=new_identifiers)
if ch is None or is_chime:
continue # Do not consider the NVR itself or chimes
# Check for wrongfully combined host with NVR entities in one device
# Check for wrongfully combined entities in one device
# Can be removed in HA 2025.12
if (DOMAIN, host.unique_id) in device.identifiers:
new_identifiers = device.identifiers.copy()
remove_ids = False
if (DOMAIN, host.unique_id) in device.identifiers:
remove_ids = True # NVR/Hub in identifiers, keep that one, remove others
for old_id in device.identifiers:
if old_id[0] == DOMAIN and old_id[1] != host.unique_id:
(old_device_uid, old_ch, old_is_chime) = get_device_uid_and_ch(old_id, host)
if (
not old_device_uid
or old_device_uid[0] != host.unique_id
or old_id[1] == host.unique_id
):
continue
if remove_ids:
new_identifiers.remove(old_id)
remove_ids = True # after the first identifier, remove the others
if new_identifiers != device.identifiers:
_LOGGER.debug(
"Updating Reolink device identifiers from %s to %s",
device.identifiers,
new_identifiers,
)
device_reg.async_update_device(
device.id, new_identifiers=new_identifiers
)
device_reg.async_update_device(device.id, new_identifiers=new_identifiers)
break
if ch is None or is_chime:
continue # Do not consider the NVR itself or chimes
# Check for wrongfully added MAC of the NVR/Hub to the camera
# Can be removed in HA 2025.12
host_connnection = (CONNECTION_NETWORK_MAC, host.api.mac_address)
@ -415,14 +419,10 @@ def migrate_entity_ids(
device.connections,
new_connections,
)
device_reg.async_update_device(
device.id, new_connections=new_connections
)
device_reg.async_update_device(device.id, new_connections=new_connections)
ch_device_ids[device.id] = ch
if host.api.supported(ch, "UID") and device_uid[1] != host.api.camera_uid(
ch
):
if host.api.supported(ch, "UID") and device_uid[1] != host.api.camera_uid(ch):
if host.api.supported(None, "UID"):
new_device_id = f"{host.unique_id}_{host.api.camera_uid(ch)}"
else:
@ -433,9 +433,7 @@ def migrate_entity_ids(
new_device_id,
)
new_identifiers = {(DOMAIN, new_device_id)}
existing_device = device_reg.async_get_device(
identifiers=new_identifiers
)
existing_device = device_reg.async_get_device(identifiers=new_identifiers)
if existing_device is None:
device_reg.async_update_device(
device.id, new_identifiers=new_identifiers

View File

@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/sense",
"iot_class": "cloud_polling",
"loggers": ["sense_energy"],
"requirements": ["sense-energy==0.13.7"]
"requirements": ["sense-energy==0.13.8"]
}

View File

@ -31,7 +31,7 @@ from .entity import SmartThingsEntity
ATTR_OPERATION_STATE = "operation_state"
MODE_TO_STATE = {
"auto": HVACMode.AUTO,
"auto": HVACMode.HEAT_COOL,
"cool": HVACMode.COOL,
"eco": HVACMode.AUTO,
"rush hour": HVACMode.AUTO,
@ -40,7 +40,7 @@ MODE_TO_STATE = {
"off": HVACMode.OFF,
}
STATE_TO_MODE = {
HVACMode.AUTO: "auto",
HVACMode.HEAT_COOL: "auto",
HVACMode.COOL: "cool",
HVACMode.HEAT: "heat",
HVACMode.OFF: "off",

View File

@ -30,5 +30,5 @@
"iot_class": "cloud_push",
"loggers": ["pysmartthings"],
"quality_scale": "bronze",
"requirements": ["pysmartthings==3.2.2"]
"requirements": ["pysmartthings==3.2.3"]
}

View File

@ -151,6 +151,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry) -
player_coordinator = SqueezeBoxPlayerUpdateCoordinator(
hass, entry, player, lms.uuid
)
await player_coordinator.async_refresh()
known_players.append(player.player_id)
async_dispatcher_send(
hass, SIGNAL_PLAYER_DISCOVERED, player_coordinator

View File

@ -12,5 +12,5 @@
"documentation": "https://www.home-assistant.io/integrations/squeezebox",
"iot_class": "local_polling",
"loggers": ["pysqueezebox"],
"requirements": ["pysqueezebox==0.12.0"]
"requirements": ["pysqueezebox==0.12.1"]
}

View File

@ -6,7 +6,7 @@ from collections.abc import Callable
from datetime import datetime
import json
import logging
from typing import TYPE_CHECKING, Any
from typing import TYPE_CHECKING, Any, cast
from pysqueezebox import Server, async_discover
import voluptuous as vol
@ -329,22 +329,22 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
@property
def media_title(self) -> str | None:
"""Title of current playing media."""
return str(self._player.title)
return cast(str | None, self._player.title)
@property
def media_channel(self) -> str | None:
"""Channel (e.g. webradio name) of current playing media."""
return str(self._player.remote_title)
return cast(str | None, self._player.remote_title)
@property
def media_artist(self) -> str | None:
"""Artist of current playing media."""
return str(self._player.artist)
return cast(str | None, self._player.artist)
@property
def media_album_name(self) -> str | None:
"""Album of current playing media."""
return str(self._player.album)
return cast(str | None, self._player.album)
@property
def repeat(self) -> RepeatMode:
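The switch from str(...) to cast(...) matters because pysqueezebox returns None when nothing is playing: str(None) yields the literal text "None" (visible as the bogus 'None' attributes removed from a snapshot later in this diff), while cast is a typing-only no-op that leaves the None in place so the attribute is simply omitted. A two-line illustration:

from typing import cast

title = None                            # e.g. nothing is playing
assert str(title) == "None"             # old code: a bogus "None" text attribute
assert cast(str | None, title) is None  # new code: the attribute is simply omitted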

View File

@ -236,7 +236,7 @@ class SynologyDSMBackupAgent(BackupAgent):
raise BackupAgentError("Failed to read meta data") from err
try:
files = await self._file_station.get_files(path=self.path)
files = await self._file_station.get_files(path=self.path, limit=1000)
except SynologyDSMAPIErrorException as err:
raise BackupAgentError("Failed to list backups") from err

View File

@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/synology_dsm",
"iot_class": "local_polling",
"loggers": ["synology_dsm"],
"requirements": ["py-synologydsm-api==2.7.1"],
"requirements": ["py-synologydsm-api==2.7.2"],
"ssdp": [
{
"manufacturer": "Synology",

View File

@ -14,7 +14,7 @@
"velbus-protocol"
],
"quality_scale": "bronze",
"requirements": ["velbus-aio==2025.4.2"],
"requirements": ["velbus-aio==2025.5.0"],
"usb": [
{
"vid": "10CF",

View File

@ -2,6 +2,7 @@
from __future__ import annotations
import asyncio
from datetime import timedelta
from typing import Any
@ -17,7 +18,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import WebControlProConfigEntry
from .entity import WebControlProGenericEntity
SCAN_INTERVAL = timedelta(seconds=5)
ACTION_DELAY = 0.5
SCAN_INTERVAL = timedelta(seconds=10)
PARALLEL_UPDATES = 1
@ -56,6 +58,7 @@ class WebControlProCover(WebControlProGenericEntity, CoverEntity):
"""Move the cover to a specific position."""
action = self._dest.action(self._drive_action_desc)
await action(percentage=100 - kwargs[ATTR_POSITION])
await asyncio.sleep(ACTION_DELAY)
@property
def is_closed(self) -> bool | None:
@ -66,11 +69,13 @@ class WebControlProCover(WebControlProGenericEntity, CoverEntity):
"""Open the cover."""
action = self._dest.action(self._drive_action_desc)
await action(percentage=0)
await asyncio.sleep(ACTION_DELAY)
async def async_close_cover(self, **kwargs: Any) -> None:
"""Close the cover."""
action = self._dest.action(self._drive_action_desc)
await action(percentage=100)
await asyncio.sleep(ACTION_DELAY)
async def async_stop_cover(self, **kwargs: Any) -> None:
"""Stop the device if in motion."""
@ -79,6 +84,7 @@ class WebControlProCover(WebControlProGenericEntity, CoverEntity):
WMS_WebControl_pro_API_actionType.Stop,
)
await action()
await asyncio.sleep(ACTION_DELAY)
class WebControlProAwning(WebControlProCover):

View File

@ -2,6 +2,7 @@
from __future__ import annotations
import asyncio
from datetime import timedelta
from typing import Any
@ -16,7 +17,8 @@ from . import WebControlProConfigEntry
from .const import BRIGHTNESS_SCALE
from .entity import WebControlProGenericEntity
SCAN_INTERVAL = timedelta(seconds=5)
ACTION_DELAY = 0.5
SCAN_INTERVAL = timedelta(seconds=15)
PARALLEL_UPDATES = 1
@ -54,11 +56,13 @@ class WebControlProLight(WebControlProGenericEntity, LightEntity):
"""Turn the light on."""
action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch)
await action(onOffState=True)
await asyncio.sleep(ACTION_DELAY)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the light off."""
action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch)
await action(onOffState=False)
await asyncio.sleep(ACTION_DELAY)
class WebControlProDimmer(WebControlProLight):
@ -87,3 +91,4 @@ class WebControlProDimmer(WebControlProLight):
await action(
percentage=brightness_to_value(BRIGHTNESS_SCALE, kwargs[ATTR_BRIGHTNESS])
)
await asyncio.sleep(ACTION_DELAY)

View File

@ -7,5 +7,5 @@
"iot_class": "local_polling",
"loggers": ["holidays"],
"quality_scale": "internal",
"requirements": ["holidays==0.72"]
"requirements": ["holidays==0.73"]
}

View File

@ -105,6 +105,7 @@ from .const import (
CONF_USE_ADDON,
DATA_CLIENT,
DOMAIN,
DRIVER_READY_TIMEOUT,
EVENT_DEVICE_ADDED_TO_REGISTRY,
EVENT_VALUE_UPDATED,
LIB_LOGGER,
@ -135,7 +136,6 @@ from .services import ZWaveServices
CONNECT_TIMEOUT = 10
DATA_DRIVER_EVENTS = "driver_events"
DRIVER_READY_TIMEOUT = 60
CONFIG_SCHEMA = vol.Schema(
{

View File

@ -88,9 +88,9 @@ from .const import (
CONF_INSTALLER_MODE,
DATA_CLIENT,
DOMAIN,
DRIVER_READY_TIMEOUT,
EVENT_DEVICE_ADDED_TO_REGISTRY,
LOGGER,
RESTORE_NVM_DRIVER_READY_TIMEOUT,
USER_AGENT,
)
from .helpers import (
@ -189,8 +189,6 @@ STRATEGY = "strategy"
# https://github.com/zwave-js/node-zwave-js/blob/master/packages/core/src/security/QR.ts#L41
MINIMUM_QR_STRING_LENGTH = 52
HARD_RESET_CONTROLLER_DRIVER_READY_TIMEOUT = 60
# Helper schemas
PLANNED_PROVISIONING_ENTRY_SCHEMA = vol.All(
@ -2858,7 +2856,7 @@ async def websocket_hard_reset_controller(
await driver.async_hard_reset()
with suppress(TimeoutError):
async with asyncio.timeout(HARD_RESET_CONTROLLER_DRIVER_READY_TIMEOUT):
async with asyncio.timeout(DRIVER_READY_TIMEOUT):
await wait_driver_ready.wait()
# When resetting the controller, the controller home id is also changed.
@ -3105,8 +3103,29 @@ async def websocket_restore_nvm(
await controller.async_restore_nvm_base64(msg["data"])
with suppress(TimeoutError):
async with asyncio.timeout(RESTORE_NVM_DRIVER_READY_TIMEOUT):
async with asyncio.timeout(DRIVER_READY_TIMEOUT):
await wait_driver_ready.wait()
# When restoring the NVM to the controller, the controller home id is also changed.
# The controller state in the client is stale after restoring the NVM,
# so get the new home id with a new client using the helper function.
# The client state will be refreshed by reloading the config entry,
# after the unique id of the config entry has been updated.
try:
version_info = await async_get_version_info(hass, entry.data[CONF_URL])
except CannotConnect:
# Just log this error, as there's nothing to do about it here.
# The stale unique id needs to be handled by a repair flow,
# after the config entry has been reloaded.
LOGGER.error(
"Failed to get server version, cannot update config entry"
"unique id with new home id, after controller NVM restore"
)
else:
hass.config_entries.async_update_entry(
entry, unique_id=str(version_info.home_id)
)
await hass.config_entries.async_reload(entry.entry_id)
connection.send_message(

View File

@ -65,7 +65,7 @@ from .const import (
CONF_USE_ADDON,
DATA_CLIENT,
DOMAIN,
RESTORE_NVM_DRIVER_READY_TIMEOUT,
DRIVER_READY_TIMEOUT,
)
from .helpers import CannotConnect, async_get_version_info
@ -776,16 +776,13 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
)
@callback
def _async_update_entry(
self, updates: dict[str, Any], *, schedule_reload: bool = True
) -> None:
def _async_update_entry(self, updates: dict[str, Any]) -> None:
"""Update the config entry with new data."""
config_entry = self._reconfigure_config_entry
assert config_entry is not None
self.hass.config_entries.async_update_entry(
config_entry, data=config_entry.data | updates
)
if schedule_reload:
self.hass.config_entries.async_schedule_reload(config_entry.entry_id)
async def async_step_intent_reconfigure(
@ -896,15 +893,63 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
# Now that the old controller is gone, we can scan for serial ports again
return await self.async_step_choose_serial_port()
try:
driver = self._get_driver()
except AbortFlow:
return self.async_abort(reason="config_entry_not_loaded")
@callback
def set_driver_ready(event: dict) -> None:
"Set the driver ready event."
wait_driver_ready.set()
wait_driver_ready = asyncio.Event()
unsubscribe = driver.once("driver ready", set_driver_ready)
# reset the old controller
try:
await self._get_driver().async_hard_reset()
except (AbortFlow, FailedCommand) as err:
await driver.async_hard_reset()
except FailedCommand as err:
unsubscribe()
_LOGGER.error("Failed to reset controller: %s", err)
return self.async_abort(reason="reset_failed")
# Update the unique id of the config entry
# to the new home id, which requires waiting for the driver
# to be ready before getting the new home id.
# If the backup restore, done later in the flow, fails,
# the config entry unique id should be the new home id
# after the controller reset.
try:
async with asyncio.timeout(DRIVER_READY_TIMEOUT):
await wait_driver_ready.wait()
except TimeoutError:
pass
finally:
unsubscribe()
config_entry = self._reconfigure_config_entry
assert config_entry is not None
try:
version_info = await async_get_version_info(
self.hass, config_entry.data[CONF_URL]
)
except CannotConnect:
# Just log this error, as there's nothing to do about it here.
# The stale unique id needs to be handled by a repair flow,
# after the config entry has been reloaded, if the backup restore
# also fails.
_LOGGER.debug(
"Failed to get server version, cannot update config entry "
"unique id with new home id, after controller reset"
)
else:
self.hass.config_entries.async_update_entry(
config_entry, unique_id=str(version_info.home_id)
)
# Unload the config entry before asking the user to unplug the controller.
await self.hass.config_entries.async_unload(config_entry.entry_id)
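The reset sequence above follows a subscribe-once, wait-with-timeout, always-unsubscribe shape around the "driver ready" event. A simplified sketch of that shape (the driver method names mirror the ones used in the hunk, but this is an illustration, not the zwave-js-server API surface):

import asyncio


async def reset_and_wait_ready(driver, timeout: float = 60.0) -> None:
    """Hard-reset the controller, then wait (bounded) for it to come back."""
    ready = asyncio.Event()
    unsubscribe = driver.once("driver ready", lambda event: ready.set())
    try:
        await driver.async_hard_reset()
        try:
            async with asyncio.timeout(timeout):
                await ready.wait()
        except TimeoutError:
            pass  # proceed anyway; the caller can re-check driver state later
    finally:
        unsubscribe()  # never leave the one-shot listener dangling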
@ -1154,14 +1199,17 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
assert ws_address is not None
version_info = self.version_info
assert version_info is not None
config_entry = self._reconfigure_config_entry
assert config_entry is not None
# We need to wait for the config entry to be reloaded,
# before restoring the backup.
# We will do this in the restore nvm progress task,
# to get a nicer user experience.
self._async_update_entry(
{
"unique_id": str(version_info.home_id),
self.hass.config_entries.async_update_entry(
config_entry,
data={
**config_entry.data,
CONF_URL: ws_address,
CONF_USB_PATH: self.usb_path,
CONF_S0_LEGACY_KEY: self.s0_legacy_key,
@ -1173,8 +1221,9 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
CONF_USE_ADDON: True,
CONF_INTEGRATION_CREATED_ADDON: self.integration_created_addon,
},
schedule_reload=False,
unique_id=str(version_info.home_id),
)
return await self.async_step_restore_nvm()
async def async_step_finish_addon_setup_reconfigure(
@ -1321,8 +1370,24 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
raise AbortFlow(f"Failed to restore network: {err}") from err
else:
with suppress(TimeoutError):
async with asyncio.timeout(RESTORE_NVM_DRIVER_READY_TIMEOUT):
async with asyncio.timeout(DRIVER_READY_TIMEOUT):
await wait_driver_ready.wait()
try:
version_info = await async_get_version_info(
self.hass, config_entry.data[CONF_URL]
)
except CannotConnect:
# Just log this error, as there's nothing to do about it here.
# The stale unique id needs to be handled by a repair flow,
# after the config entry has been reloaded.
_LOGGER.error(
"Failed to get server version, cannot update config entry "
"unique id with new home id, after controller reset"
)
else:
self.hass.config_entries.async_update_entry(
config_entry, unique_id=str(version_info.home_id)
)
await self.hass.config_entries.async_reload(config_entry.entry_id)
finally:
for unsub in unsubs:

View File

@ -204,4 +204,4 @@ COVER_TILT_PROPERTY_KEYS: set[str | int | None] = {
# Other constants
RESTORE_NVM_DRIVER_READY_TIMEOUT = 60
DRIVER_READY_TIMEOUT = 60

View File

@ -25,7 +25,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2025
MINOR_VERSION: Final = 5
PATCH_VERSION: Final = "2"
PATCH_VERSION: Final = "3"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)

View File

@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2025.5.2"
version = "2025.5.3"
license = "Apache-2.0"
license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
description = "Open-source home automation platform running on Python 3."

requirements_all.txt generated
View File

@ -214,7 +214,7 @@ aiobafi6==0.9.0
aiobotocore==2.21.1
# homeassistant.components.comelit
aiocomelit==0.12.1
aiocomelit==0.12.3
# homeassistant.components.dhcp
aiodhcpwatcher==1.1.1
@ -319,7 +319,7 @@ aionanoleaf==0.2.1
aionotion==2024.03.0
# homeassistant.components.ntfy
aiontfy==0.5.1
aiontfy==0.5.2
# homeassistant.components.nut
aionut==4.3.4
@ -762,7 +762,7 @@ debugpy==1.8.13
# decora==0.6
# homeassistant.components.ecovacs
deebot-client==13.2.0
deebot-client==13.2.1
# homeassistant.components.ihc
# homeassistant.components.namecheapdns
@ -1158,7 +1158,7 @@ hole==0.8.0
# homeassistant.components.holiday
# homeassistant.components.workday
holidays==0.72
holidays==0.73
# homeassistant.components.frontend
home-assistant-frontend==20250516.0
@ -1614,7 +1614,7 @@ openwrt-luci-rpc==1.1.17
openwrt-ubus-rpc==0.0.2
# homeassistant.components.opower
opower==0.12.0
opower==0.12.1
# homeassistant.components.oralb
oralb-ble==0.17.6
@ -1771,7 +1771,7 @@ py-schluter==0.1.7
py-sucks==0.9.10
# homeassistant.components.synology_dsm
py-synologydsm-api==2.7.1
py-synologydsm-api==2.7.2
# homeassistant.components.atome
pyAtome==0.1.1
@ -1829,7 +1829,7 @@ pyairnow==1.2.1
pyairvisual==2023.08.1
# homeassistant.components.aprilaire
pyaprilaire==0.8.1
pyaprilaire==0.9.0
# homeassistant.components.asuswrt
pyasuswrt==0.1.21
@ -1973,7 +1973,7 @@ pyevilgenius==2.0.0
pyezviz==0.2.1.2
# homeassistant.components.fibaro
pyfibaro==0.8.2
pyfibaro==0.8.3
# homeassistant.components.fido
pyfido==2.1.2
@ -2093,7 +2093,7 @@ pykwb==0.0.8
pylacrosse==0.4
# homeassistant.components.lamarzocco
pylamarzocco==2.0.3
pylamarzocco==2.0.4
# homeassistant.components.lastfm
pylast==5.1.0
@ -2326,7 +2326,7 @@ pysma==0.7.5
pysmappee==0.2.29
# homeassistant.components.smartthings
pysmartthings==3.2.2
pysmartthings==3.2.3
# homeassistant.components.smarty
pysmarty2==0.10.2
@ -2356,7 +2356,7 @@ pyspcwebgw==0.7.0
pyspeex-noise==1.0.2
# homeassistant.components.squeezebox
pysqueezebox==0.12.0
pysqueezebox==0.12.1
# homeassistant.components.stiebel_eltron
pystiebeleltron==0.1.0
@ -2713,7 +2713,7 @@ sendgrid==6.8.2
# homeassistant.components.emulated_kasa
# homeassistant.components.sense
sense-energy==0.13.7
sense-energy==0.13.8
# homeassistant.components.sensirion_ble
sensirion-ble==0.1.1
@ -3016,7 +3016,7 @@ vallox-websocket-api==5.3.0
vehicle==2.2.2
# homeassistant.components.velbus
velbus-aio==2025.4.2
velbus-aio==2025.5.0
# homeassistant.components.venstar
venstarcolortouch==0.19
@ -3147,7 +3147,7 @@ youless-api==2.2.0
youtubeaio==1.1.5
# homeassistant.components.media_extractor
yt-dlp[default]==2025.03.31
yt-dlp[default]==2025.05.22
# homeassistant.components.zabbix
zabbix-utils==2.0.2

View File

@ -202,7 +202,7 @@ aiobafi6==0.9.0
aiobotocore==2.21.1
# homeassistant.components.comelit
aiocomelit==0.12.1
aiocomelit==0.12.3
# homeassistant.components.dhcp
aiodhcpwatcher==1.1.1
@ -301,7 +301,7 @@ aionanoleaf==0.2.1
aionotion==2024.03.0
# homeassistant.components.ntfy
aiontfy==0.5.1
aiontfy==0.5.2
# homeassistant.components.nut
aionut==4.3.4
@ -653,7 +653,7 @@ dbus-fast==2.43.0
debugpy==1.8.13
# homeassistant.components.ecovacs
deebot-client==13.2.0
deebot-client==13.2.1
# homeassistant.components.ihc
# homeassistant.components.namecheapdns
@ -988,7 +988,7 @@ hole==0.8.0
# homeassistant.components.holiday
# homeassistant.components.workday
holidays==0.72
holidays==0.73
# homeassistant.components.frontend
home-assistant-frontend==20250516.0
@ -1351,7 +1351,7 @@ openhomedevice==2.2.0
openwebifpy==4.3.1
# homeassistant.components.opower
opower==0.12.0
opower==0.12.1
# homeassistant.components.oralb
oralb-ble==0.17.6
@ -1470,7 +1470,7 @@ py-nightscout==1.2.2
py-sucks==0.9.10
# homeassistant.components.synology_dsm
py-synologydsm-api==2.7.1
py-synologydsm-api==2.7.2
# homeassistant.components.hdmi_cec
pyCEC==0.5.2
@ -1510,7 +1510,7 @@ pyairnow==1.2.1
pyairvisual==2023.08.1
# homeassistant.components.aprilaire
pyaprilaire==0.8.1
pyaprilaire==0.9.0
# homeassistant.components.asuswrt
pyasuswrt==0.1.21
@ -1612,7 +1612,7 @@ pyevilgenius==2.0.0
pyezviz==0.2.1.2
# homeassistant.components.fibaro
pyfibaro==0.8.2
pyfibaro==0.8.3
# homeassistant.components.fido
pyfido==2.1.2
@ -1708,7 +1708,7 @@ pykrakenapi==0.1.8
pykulersky==0.5.8
# homeassistant.components.lamarzocco
pylamarzocco==2.0.3
pylamarzocco==2.0.4
# homeassistant.components.lastfm
pylast==5.1.0
@ -1899,7 +1899,7 @@ pysma==0.7.5
pysmappee==0.2.29
# homeassistant.components.smartthings
pysmartthings==3.2.2
pysmartthings==3.2.3
# homeassistant.components.smarty
pysmarty2==0.10.2
@ -1929,7 +1929,7 @@ pyspcwebgw==0.7.0
pyspeex-noise==1.0.2
# homeassistant.components.squeezebox
pysqueezebox==0.12.0
pysqueezebox==0.12.1
# homeassistant.components.stiebel_eltron
pystiebeleltron==0.1.0
@ -2196,7 +2196,7 @@ securetar==2025.2.1
# homeassistant.components.emulated_kasa
# homeassistant.components.sense
sense-energy==0.13.7
sense-energy==0.13.8
# homeassistant.components.sensirion_ble
sensirion-ble==0.1.1
@ -2439,7 +2439,7 @@ vallox-websocket-api==5.3.0
vehicle==2.2.2
# homeassistant.components.velbus
velbus-aio==2025.4.2
velbus-aio==2025.5.0
# homeassistant.components.venstar
venstarcolortouch==0.19
@ -2549,7 +2549,7 @@ youless-api==2.2.0
youtubeaio==1.1.5
# homeassistant.components.media_extractor
yt-dlp[default]==2025.03.31
yt-dlp[default]==2025.05.22
# homeassistant.components.zamg
zamg==0.3.6

View File

@ -6,7 +6,7 @@ from collections.abc import AsyncGenerator
from io import StringIO
from unittest.mock import ANY, Mock, patch
from azure.core.exceptions import HttpResponseError
from azure.core.exceptions import AzureError, HttpResponseError, ServiceRequestError
from azure.storage.blob import BlobProperties
import pytest
@ -276,14 +276,33 @@ async def test_agents_error_on_download_not_found(
assert mock_client.download_blob.call_count == 0
@pytest.mark.parametrize(
("error", "message"),
[
(
HttpResponseError("http error"),
"Error during backup operation in async_delete_backup: Status None, message: http error",
),
(
ServiceRequestError("timeout"),
"Timeout during backup operation in async_delete_backup",
),
(
AzureError("generic error"),
"Error during backup operation in async_delete_backup: generic error",
),
],
)
async def test_error_during_delete(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
mock_client: MagicMock,
mock_config_entry: MockConfigEntry,
error: Exception,
message: str,
) -> None:
"""Test the error wrapper."""
mock_client.delete_blob.side_effect = HttpResponseError("Failed to delete backup")
mock_client.delete_blob.side_effect = error
client = await hass_ws_client(hass)
@ -297,12 +316,7 @@ async def test_error_during_delete(
assert response["success"]
assert response["result"] == {
"agent_errors": {
f"{DOMAIN}.{mock_config_entry.entry_id}": (
"Error during backup operation in async_delete_backup: "
"Status None, message: Failed to delete backup"
)
}
"agent_errors": {f"{DOMAIN}.{mock_config_entry.entry_id}": message}
}

View File

@ -84,7 +84,7 @@ async def test_climate_data_update(
freezer: FrozenDateTimeFactory,
mock_serial_bridge: AsyncMock,
mock_serial_bridge_config_entry: MockConfigEntry,
val: list[Any, Any],
val: list[list[Any]],
mode: HVACMode,
temp: float,
) -> None:

View File

@ -91,7 +91,7 @@ async def test_humidifier_data_update(
freezer: FrozenDateTimeFactory,
mock_serial_bridge: AsyncMock,
mock_serial_bridge_config_entry: MockConfigEntry,
val: list[Any, Any],
val: list[list[Any]],
mode: str,
humidity: float,
) -> None:

View File

@ -172,8 +172,11 @@
}),
'name': None,
'options': dict({
'sensor.private': dict({
'suggested_unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
}),
'original_device_class': None,
}),
'original_device_class': <SensorDeviceClass.AREA: 'area'>,
'original_icon': None,
'original_name': 'Area cleaned',
'platform': 'ecovacs',
@ -181,21 +184,22 @@
'supported_features': 0,
'translation_key': 'stats_area',
'unique_id': '8516fbb1-17f1-4194-0000000_stats_area',
'unit_of_measurement': <UnitOfArea.SQUARE_CENTIMETERS: 'cm²'>,
'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
})
# ---
# name: test_sensors[5xu9h3][sensor.goat_g1_area_cleaned:state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'area',
'friendly_name': 'Goat G1 Area cleaned',
'unit_of_measurement': <UnitOfArea.SQUARE_CENTIMETERS: 'cm²'>,
'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
}),
'context': <ANY>,
'entity_id': 'sensor.goat_g1_area_cleaned',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '10',
'state': '0.0010',
})
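The new '0.0010' state is just the device's native cm² reading converted by the AREA device class and the suggested m² unit; the underlying value is unchanged:

native_cm2 = 10            # the device's native reading, as in the old snapshot (cm²)
m2 = native_cm2 / 10_000   # 1 m² = 10 000 cm²
assert m2 == 0.0010        # matches the converted state shown above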
# ---
# name: test_sensors[5xu9h3][sensor.goat_g1_battery:entity-registry]
@ -515,7 +519,7 @@
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_device_class': <SensorDeviceClass.AREA: 'area'>,
'original_icon': None,
'original_name': 'Total area cleaned',
'platform': 'ecovacs',
@ -523,15 +527,16 @@
'supported_features': 0,
'translation_key': 'total_stats_area',
'unique_id': '8516fbb1-17f1-4194-0000000_total_stats_area',
'unit_of_measurement': <UnitOfArea.SQUARE_CENTIMETERS: 'cm²'>,
'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
})
# ---
# name: test_sensors[5xu9h3][sensor.goat_g1_total_area_cleaned:state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'area',
'friendly_name': 'Goat G1 Total area cleaned',
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
'unit_of_measurement': <UnitOfArea.SQUARE_CENTIMETERS: 'cm²'>,
'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
}),
'context': <ANY>,
'entity_id': 'sensor.goat_g1_total_area_cleaned',
@ -762,8 +767,11 @@
}),
'name': None,
'options': dict({
'sensor.private': dict({
'suggested_unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
}),
'original_device_class': None,
}),
'original_device_class': <SensorDeviceClass.AREA: 'area'>,
'original_icon': None,
'original_name': 'Area cleaned',
'platform': 'ecovacs',
@ -777,6 +785,7 @@
# name: test_sensors[qhe2o2][sensor.dusty_area_cleaned:state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'area',
'friendly_name': 'Dusty Area cleaned',
'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
}),
@ -1258,7 +1267,7 @@
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_device_class': <SensorDeviceClass.AREA: 'area'>,
'original_icon': None,
'original_name': 'Total area cleaned',
'platform': 'ecovacs',
@ -1272,6 +1281,7 @@
# name: test_sensors[qhe2o2][sensor.dusty_total_area_cleaned:state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'area',
'friendly_name': 'Dusty Total area cleaned',
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
@ -1553,8 +1563,11 @@
}),
'name': None,
'options': dict({
'sensor.private': dict({
'suggested_unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
}),
'original_device_class': None,
}),
'original_device_class': <SensorDeviceClass.AREA: 'area'>,
'original_icon': None,
'original_name': 'Area cleaned',
'platform': 'ecovacs',
@ -1568,6 +1581,7 @@
# name: test_sensors[yna5x1][sensor.ozmo_950_area_cleaned:state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'area',
'friendly_name': 'Ozmo 950 Area cleaned',
'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
}),
@ -1944,7 +1958,7 @@
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_device_class': <SensorDeviceClass.AREA: 'area'>,
'original_icon': None,
'original_name': 'Total area cleaned',
'platform': 'ecovacs',
@ -1958,6 +1972,7 @@
# name: test_sensors[yna5x1][sensor.ozmo_950_total_area_cleaned:state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'area',
'friendly_name': 'Ozmo 950 Total area cleaned',
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,

View File

@ -1017,6 +1017,18 @@ async def test_start_from_history_then_watch_state_changes_sliding(
}
for i, sensor_type in enumerate(["time", "ratio", "count"])
]
+ [
{
"platform": "history_stats",
"entity_id": "binary_sensor.state",
"name": f"sensor_delayed{i}",
"state": "on",
"end": "{{ utcnow()-timedelta(minutes=5) }}",
"duration": {"minutes": 55},
"type": sensor_type,
}
for i, sensor_type in enumerate(["time", "ratio", "count"])
]
},
)
await hass.async_block_till_done()
@ -1028,6 +1040,9 @@ async def test_start_from_history_then_watch_state_changes_sliding(
assert hass.states.get("sensor.sensor0").state == "0.0"
assert hass.states.get("sensor.sensor1").state == "0.0"
assert hass.states.get("sensor.sensor2").state == "0"
assert hass.states.get("sensor.sensor_delayed0").state == "0.0"
assert hass.states.get("sensor.sensor_delayed1").state == "0.0"
assert hass.states.get("sensor.sensor_delayed2").state == "0"
with freeze_time(time):
hass.states.async_set("binary_sensor.state", "on")
@ -1038,6 +1053,10 @@ async def test_start_from_history_then_watch_state_changes_sliding(
assert hass.states.get("sensor.sensor0").state == "0.0"
assert hass.states.get("sensor.sensor1").state == "0.0"
assert hass.states.get("sensor.sensor2").state == "1"
# Delayed sensor will not have registered the turn on yet
assert hass.states.get("sensor.sensor_delayed0").state == "0.0"
assert hass.states.get("sensor.sensor_delayed1").state == "0.0"
assert hass.states.get("sensor.sensor_delayed2").state == "0"
# After sensor has been on for 15 minutes, check state
time += timedelta(minutes=15) # 00:15
@ -1048,6 +1067,10 @@ async def test_start_from_history_then_watch_state_changes_sliding(
assert hass.states.get("sensor.sensor0").state == "0.25"
assert hass.states.get("sensor.sensor1").state == "25.0"
assert hass.states.get("sensor.sensor2").state == "1"
# Delayed sensor will only have data from 00:00 - 00:10
assert hass.states.get("sensor.sensor_delayed0").state == "0.17"
assert hass.states.get("sensor.sensor_delayed1").state == "18.2" # 10 / 55
assert hass.states.get("sensor.sensor_delayed2").state == "1"
with freeze_time(time):
hass.states.async_set("binary_sensor.state", "off")
@ -1064,6 +1087,9 @@ async def test_start_from_history_then_watch_state_changes_sliding(
assert hass.states.get("sensor.sensor0").state == "0.25"
assert hass.states.get("sensor.sensor1").state == "25.0"
assert hass.states.get("sensor.sensor2").state == "1"
assert hass.states.get("sensor.sensor_delayed0").state == "0.25"
assert hass.states.get("sensor.sensor_delayed1").state == "27.3" # 15 / 55
assert hass.states.get("sensor.sensor_delayed2").state == "1"
time += timedelta(minutes=20) # 01:05
@ -1075,6 +1101,9 @@ async def test_start_from_history_then_watch_state_changes_sliding(
assert hass.states.get("sensor.sensor0").state == "0.17"
assert hass.states.get("sensor.sensor1").state == "16.7"
assert hass.states.get("sensor.sensor2").state == "1"
assert hass.states.get("sensor.sensor_delayed0").state == "0.17"
assert hass.states.get("sensor.sensor_delayed1").state == "18.2" # 10 / 55
assert hass.states.get("sensor.sensor_delayed2").state == "1"
time += timedelta(minutes=5) # 01:10
@ -1086,6 +1115,9 @@ async def test_start_from_history_then_watch_state_changes_sliding(
assert hass.states.get("sensor.sensor0").state == "0.08"
assert hass.states.get("sensor.sensor1").state == "8.3"
assert hass.states.get("sensor.sensor2").state == "1"
assert hass.states.get("sensor.sensor_delayed0").state == "0.08"
assert hass.states.get("sensor.sensor_delayed1").state == "9.1" # 5 / 55
assert hass.states.get("sensor.sensor_delayed2").state == "1"
time += timedelta(minutes=10) # 01:20
@ -1096,6 +1128,9 @@ async def test_start_from_history_then_watch_state_changes_sliding(
assert hass.states.get("sensor.sensor0").state == "0.0"
assert hass.states.get("sensor.sensor1").state == "0.0"
assert hass.states.get("sensor.sensor2").state == "0"
assert hass.states.get("sensor.sensor_delayed0").state == "0.0"
assert hass.states.get("sensor.sensor_delayed1").state == "0.0"
assert hass.states.get("sensor.sensor_delayed2").state == "0"
async def test_does_not_work_into_the_future(
@ -1629,7 +1664,7 @@ async def test_state_change_during_window_rollover(
"entity_id": "binary_sensor.state",
"name": "sensor1",
"state": "on",
"start": "{{ today_at() }}",
"start": "{{ today_at('12:00') if now().hour == 1 else today_at() }}",
"end": "{{ now() }}",
"type": "time",
}
@ -1644,7 +1679,7 @@ async def test_state_change_during_window_rollover(
assert hass.states.get("sensor.sensor1").state == "11.0"
# Advance 59 minutes, to record the last minute update just before midnight, just like a real system would do.
t2 = start_time + timedelta(minutes=59, microseconds=300)
t2 = start_time + timedelta(minutes=59, microseconds=300) # 23:59
with freeze_time(t2):
async_fire_time_changed(hass, t2)
await hass.async_block_till_done()
@ -1653,7 +1688,7 @@ async def test_state_change_during_window_rollover(
# One minute has passed and the time has now rolled over into a new day, resetting the recorder window.
# The sensor will be ON since midnight.
t3 = t2 + timedelta(minutes=1)
t3 = t2 + timedelta(minutes=1) # 00:01
with freeze_time(t3):
# The sensor turns off around this time, before the sensor does its normal polled update.
hass.states.async_set("binary_sensor.state", "off")
@ -1662,13 +1697,69 @@ async def test_state_change_during_window_rollover(
assert hass.states.get("sensor.sensor1").state == "0.0"
# More time passes, and the history stats does a polled update again. It should be 0 since the sensor has been off since midnight.
t4 = t3 + timedelta(minutes=10)
# Turn the sensor back on.
t4 = t3 + timedelta(minutes=10) # 00:10
with freeze_time(t4):
async_fire_time_changed(hass, t4)
await hass.async_block_till_done()
hass.states.async_set("binary_sensor.state", "on")
await hass.async_block_till_done()
assert hass.states.get("sensor.sensor1").state == "0.0"
# Due to time change, start time has now moved into the future. Turn off the sensor.
t5 = t4 + timedelta(hours=1) # 01:10
with freeze_time(t5):
hass.states.async_set("binary_sensor.state", "off")
await hass.async_block_till_done(wait_background_tasks=True)
assert hass.states.get("sensor.sensor1").state == STATE_UNKNOWN
# Start time has moved back to start of today. Turn the sensor on at the same time it is recomputed
# Should query the recorder this time due to start time moving backwards in time.
t6 = t5 + timedelta(hours=1) # 02:10
def _fake_states_t6(*args, **kwargs):
return {
"binary_sensor.state": [
ha.State(
"binary_sensor.state",
"off",
last_changed=t6.replace(hour=0, minute=0, second=0, microsecond=0),
),
ha.State(
"binary_sensor.state",
"on",
last_changed=t6.replace(hour=0, minute=10, second=0, microsecond=0),
),
ha.State(
"binary_sensor.state",
"off",
last_changed=t6.replace(hour=1, minute=10, second=0, microsecond=0),
),
]
}
with (
patch(
"homeassistant.components.recorder.history.state_changes_during_period",
_fake_states_t6,
),
freeze_time(t6),
):
hass.states.async_set("binary_sensor.state", "on")
await hass.async_block_till_done(wait_background_tasks=True)
assert hass.states.get("sensor.sensor1").state == "1.0"
# Another hour passes since the re-query. Total 'On' time should be 2 hours (00:10-1:10, 2:10-now (3:10))
t7 = t6 + timedelta(hours=1) # 03:10
with freeze_time(t7):
async_fire_time_changed(hass, t7)
await hass.async_block_till_done()
assert hass.states.get("sensor.sensor1").state == "2.0"
@pytest.mark.parametrize("time_zone", ["Europe/Berlin", "America/Chicago", "US/Hawaii"])
async def test_end_time_with_microseconds_zeroed(
@ -1934,7 +2025,7 @@ async def test_history_stats_handles_floored_timestamps(
await async_update_entity(hass, "sensor.sensor1")
await hass.async_block_till_done()
assert last_times == (start_time, start_time + timedelta(hours=2))
assert last_times == (start_time, start_time)
async def test_unique_id(

View File

@ -3,12 +3,7 @@
from collections.abc import Generator
from unittest.mock import AsyncMock, MagicMock, patch
from pylamarzocco.const import (
FirmwareType,
UpdateCommandStatus,
UpdateProgressInfo,
UpdateStatus,
)
from pylamarzocco.const import FirmwareType, UpdateProgressInfo, UpdateStatus
from pylamarzocco.exceptions import RequestNotSuccessful
from pylamarzocco.models import UpdateDetails
import pytest
@ -61,7 +56,7 @@ async def test_update_process(
mock_lamarzocco.get_firmware.side_effect = [
UpdateDetails(
status=UpdateStatus.TO_UPDATE,
command_status=UpdateCommandStatus.IN_PROGRESS,
command_status=UpdateStatus.IN_PROGRESS,
progress_info=UpdateProgressInfo.STARTING_PROCESS,
progress_percentage=0,
),
@ -139,7 +134,7 @@ async def test_update_times_out(
"""Test error during update."""
mock_lamarzocco.get_firmware.return_value = UpdateDetails(
status=UpdateStatus.TO_UPDATE,
command_status=UpdateCommandStatus.IN_PROGRESS,
command_status=UpdateStatus.IN_PROGRESS,
progress_info=UpdateProgressInfo.STARTING_PROCESS,
progress_percentage=0,
)

View File

@ -66,6 +66,34 @@ async def test_entity(
)
async def test_schedule_update_webhook_event(
hass: HomeAssistant, config_entry: MockConfigEntry, netatmo_auth: AsyncMock
) -> None:
"""Test schedule update webhook event without schedule_id."""
with selected_platforms([Platform.CLIMATE]):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
climate_entity_livingroom = "climate.livingroom"
# Save initial state
initial_state = hass.states.get(climate_entity_livingroom)
# Create a schedule update event without a schedule_id (the event is sent when temperature sets of a schedule are changed)
response = {
"home_id": "91763b24c43d3e344f424e8b",
"event_type": "schedule",
"push_type": "home_event_changed",
}
await simulate_webhook(hass, webhook_id, response)
# State should be unchanged
assert hass.states.get(climate_entity_livingroom) == initial_state
async def test_webhook_event_handling_thermostats(
hass: HomeAssistant, config_entry: MockConfigEntry, netatmo_auth: AsyncMock
) -> None:

View File

@ -55,5 +55,12 @@
"reservations_remaining": 2,
"attachment_total_size": 0,
"attachment_total_size_remaining": 104857600
},
"billing": {
"customer": true,
"subscription": true,
"status": "active",
"interval": "year",
"paid_until": 1754080667
}
}

View File

@ -724,6 +724,57 @@ async def test_cleanup_combined_with_NVR(
reolink_connect.baichuan.mac_address.return_value = TEST_MAC_CAM
async def test_cleanup_hub_and_direct_connection(
hass: HomeAssistant,
config_entry: MockConfigEntry,
reolink_connect: MagicMock,
entity_registry: er.EntityRegistry,
device_registry: dr.DeviceRegistry,
) -> None:
"""Test cleanup of the device registry if IPC camera device was connected directly and through the hub/NVR."""
reolink_connect.channels = [0]
entity_id = f"{TEST_UID}_{TEST_UID_CAM}_record_audio"
dev_id = f"{TEST_UID}_{TEST_UID_CAM}"
domain = Platform.SWITCH
start_identifiers = {
(DOMAIN, dev_id), # IPC camera through hub
(DOMAIN, TEST_UID_CAM), # directly connected IPC camera
("OTHER_INTEGRATION", "SOME_ID"),
}
dev_entry = device_registry.async_get_or_create(
identifiers=start_identifiers,
connections={(CONNECTION_NETWORK_MAC, TEST_MAC_CAM)},
config_entry_id=config_entry.entry_id,
disabled_by=None,
)
entity_registry.async_get_or_create(
domain=domain,
platform=DOMAIN,
unique_id=entity_id,
config_entry=config_entry,
suggested_object_id=entity_id,
disabled_by=None,
device_id=dev_entry.id,
)
assert entity_registry.async_get_entity_id(domain, DOMAIN, entity_id)
device = device_registry.async_get_device(identifiers={(DOMAIN, dev_id)})
assert device
assert device.identifiers == start_identifiers
# setup CH 0 and host entities/device
with patch("homeassistant.components.reolink.PLATFORMS", [domain]):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert entity_registry.async_get_entity_id(domain, DOMAIN, entity_id)
device = device_registry.async_get_device(identifiers={(DOMAIN, dev_id)})
assert device
assert device.identifiers == start_identifiers
async def test_no_repair_issue(
hass: HomeAssistant, config_entry: MockConfigEntry, issue_registry: ir.IssueRegistry
) -> None:

View File

@ -541,7 +541,7 @@
'hvac_modes': list([
<HVACMode.OFF: 'off'>,
<HVACMode.COOL: 'cool'>,
<HVACMode.AUTO: 'auto'>,
<HVACMode.HEAT_COOL: 'heat_cool'>,
]),
'max_temp': 35.0,
'min_temp': 7.0,
@ -589,7 +589,7 @@
'hvac_modes': list([
<HVACMode.OFF: 'off'>,
<HVACMode.COOL: 'cool'>,
<HVACMode.AUTO: 'auto'>,
<HVACMode.HEAT_COOL: 'heat_cool'>,
]),
'max_temp': 35.0,
'min_temp': 7.0,

View File

@ -625,7 +625,7 @@ async def test_thermostat_set_hvac_mode(
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: "climate.asd", ATTR_HVAC_MODE: HVACMode.AUTO},
{ATTR_ENTITY_ID: "climate.asd", ATTR_HVAC_MODE: HVACMode.HEAT_COOL},
blocking=True,
)
devices.execute_device_command.assert_called_once_with(

View File

@ -78,12 +78,8 @@
'group_members': list([
]),
'is_volume_muted': True,
'media_album_name': 'None',
'media_artist': 'None',
'media_channel': 'None',
'media_duration': 1,
'media_position': 1,
'media_title': 'None',
'query_result': dict({
}),
'repeat': <RepeatMode.OFF: 'off'>,

View File

@ -72,7 +72,12 @@ from homeassistant.helpers.device_registry import DeviceRegistry
from homeassistant.helpers.entity_registry import EntityRegistry
from homeassistant.util.dt import utcnow
from .conftest import FAKE_VALID_ITEM_ID, TEST_MAC, TEST_VOLUME_STEP
from .conftest import (
FAKE_VALID_ITEM_ID,
TEST_MAC,
TEST_VOLUME_STEP,
configure_squeezebox_media_player_platform,
)
from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
@ -100,6 +105,33 @@ async def test_entity_registry(
await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id)
async def test_squeezebox_new_player_discovery(
hass: HomeAssistant,
config_entry: MockConfigEntry,
lms: MagicMock,
player_factory: MagicMock,
freezer: FrozenDateTimeFactory,
) -> None:
"""Test discovery of a new squeezebox player."""
# Initial setup with one player (from the 'lms' fixture)
await configure_squeezebox_media_player_platform(hass, config_entry, lms)
await hass.async_block_till_done(wait_background_tasks=True)
assert hass.states.get("media_player.test_player") is not None
assert hass.states.get("media_player.test_player_2") is None
# Simulate a new player appearing
new_player_mock = player_factory(TEST_MAC[1])
lms.async_get_players.return_value = [
lms.async_get_players.return_value[0],
new_player_mock,
]
freezer.tick(timedelta(seconds=DISCOVERY_INTERVAL))
async_fire_time_changed(hass)
await hass.async_block_till_done()
assert hass.states.get("media_player.test_player_2") is not None
async def test_squeezebox_player_rediscovery(
hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory
) -> None:

View File

@ -5175,7 +5175,7 @@ async def test_hard_reset_controller(
client.async_send_command.side_effect = async_send_command_no_driver_ready
with patch(
"homeassistant.components.zwave_js.api.HARD_RESET_CONTROLLER_DRIVER_READY_TIMEOUT",
"homeassistant.components.zwave_js.api.DRIVER_READY_TIMEOUT",
new=0,
):
await ws_client.send_json_auto_id(
@ -5551,8 +5551,12 @@ async def test_restore_nvm(
integration,
client,
hass_ws_client: WebSocketGenerator,
get_server_version: AsyncMock,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test the restore NVM websocket command."""
entry = integration
assert entry.unique_id == "3245146787"
ws_client = await hass_ws_client(hass)
# Set up mocks for the controller events
@ -5632,6 +5636,45 @@ async def test_restore_nvm(
},
require_schema=14,
)
assert entry.unique_id == "1234"
client.async_send_command.reset_mock()
# Test client connect error when getting the server version.
get_server_version.side_effect = ClientError("Boom!")
# Send the subscription request
await ws_client.send_json_auto_id(
{
"type": "zwave_js/restore_nvm",
"entry_id": entry.entry_id,
"data": "dGVzdA==", # base64 encoded "test"
}
)
# Verify the finished event first
msg = await ws_client.receive_json()
assert msg["type"] == "event"
assert msg["event"]["event"] == "finished"
# Verify subscription success
msg = await ws_client.receive_json()
assert msg["type"] == "result"
assert msg["success"] is True
assert client.async_send_command.call_count == 3
assert client.async_send_command.call_args_list[0] == call(
{
"command": "controller.restore_nvm",
"nvmData": "dGVzdA==",
},
require_schema=14,
)
assert (
"Failed to get server version, cannot update config entry"
"unique id with new home id, after controller NVM restore"
) in caplog.text
client.async_send_command.reset_mock()
@ -5647,7 +5690,7 @@ async def test_restore_nvm(
client.async_send_command.side_effect = async_send_command_no_driver_ready
with patch(
"homeassistant.components.zwave_js.api.RESTORE_NVM_DRIVER_READY_TIMEOUT",
"homeassistant.components.zwave_js.api.DRIVER_READY_TIMEOUT",
new=0,
):
# Send the subscription request

View File

@ -153,19 +153,6 @@ def mock_sdk_version(client: MagicMock) -> Generator[None]:
client.driver.controller.data["sdkVersion"] = original_sdk_version
@pytest.fixture(name="driver_ready_timeout")
def mock_driver_ready_timeout() -> Generator[None]:
"""Mock migration nvm restore driver ready timeout."""
with patch(
(
"homeassistant.components.zwave_js.config_flow."
"RESTORE_NVM_DRIVER_READY_TIMEOUT"
),
new=0,
):
yield
async def test_manual(hass: HomeAssistant) -> None:
"""Test we create an entry with manual step."""
@ -861,8 +848,11 @@ async def test_usb_discovery_migration(
restart_addon: AsyncMock,
client: MagicMock,
integration: MockConfigEntry,
get_server_version: AsyncMock,
) -> None:
"""Test usb discovery migration."""
version_info = get_server_version.return_value
version_info.home_id = 4321
addon_options["device"] = "/dev/ttyUSB0"
entry = integration
assert client.connect.call_count == 1
@ -887,6 +877,13 @@ async def test_usb_discovery_migration(
side_effect=mock_backup_nvm_raw
)
async def mock_reset_controller():
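# Emit "driver ready" so the flow does not run into the driver ready timeout after the hard reset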
client.driver.emit(
"driver ready", {"event": "driver ready", "source": "driver"}
)
client.driver.async_hard_reset = AsyncMock(side_effect=mock_reset_controller)
async def mock_restore_nvm(data: bytes):
client.driver.controller.emit(
"nvm convert progress",
@ -918,15 +915,15 @@ async def test_usb_discovery_migration(
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "intent_migrate"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "backup_nvm"
with patch("pathlib.Path.write_bytes", MagicMock()) as mock_file:
with patch("pathlib.Path.write_bytes") as mock_file:
await hass.async_block_till_done()
assert client.driver.controller.async_backup_nvm_raw.call_count == 1
assert mock_file.call_count == 1
@ -936,13 +933,14 @@ async def test_usb_discovery_migration(
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "instruct_unplug"
assert entry.unique_id == "4321"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert entry.state is config_entries.ConfigEntryState.NOT_LOADED
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "start_addon"
assert set_addon_options.call_args == call(
"core_zwave_js", AddonsOptions(config={"device": USB_DISCOVERY_INFO.device})
@ -952,9 +950,11 @@ async def test_usb_discovery_migration(
assert restart_addon.call_args == call("core_zwave_js")
version_info.home_id = 5678
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "restore_nvm"
assert client.connect.call_count == 2
@ -970,9 +970,10 @@ async def test_usb_discovery_migration(
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "migration_successful"
assert integration.data["url"] == "ws://host1:3001"
assert integration.data["usb_path"] == USB_DISCOVERY_INFO.device
assert integration.data["use_addon"] is True
assert entry.data["url"] == "ws://host1:3001"
assert entry.data["usb_path"] == USB_DISCOVERY_INFO.device
assert entry.data["use_addon"] is True
assert entry.unique_id == "5678"
@pytest.mark.usefixtures("supervisor", "addon_running", "get_addon_discovery_info")
@ -989,10 +990,9 @@ async def test_usb_discovery_migration(
]
],
)
async def test_usb_discovery_migration_driver_ready_timeout(
async def test_usb_discovery_migration_restore_driver_ready_timeout(
hass: HomeAssistant,
addon_options: dict[str, Any],
driver_ready_timeout: None,
mock_usb_serial_by_id: MagicMock,
set_addon_options: AsyncMock,
restart_addon: AsyncMock,
@ -1024,6 +1024,13 @@ async def test_usb_discovery_migration_driver_ready_timeout(
side_effect=mock_backup_nvm_raw
)
async def mock_reset_controller():
client.driver.emit(
"driver ready", {"event": "driver ready", "source": "driver"}
)
client.driver.async_hard_reset = AsyncMock(side_effect=mock_reset_controller)
async def mock_restore_nvm(data: bytes):
client.driver.controller.emit(
"nvm convert progress",
@ -1052,15 +1059,15 @@ async def test_usb_discovery_migration_driver_ready_timeout(
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "intent_migrate"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "backup_nvm"
with patch("pathlib.Path.write_bytes", MagicMock()) as mock_file:
with patch("pathlib.Path.write_bytes") as mock_file:
await hass.async_block_till_done()
assert client.driver.controller.async_backup_nvm_raw.call_count == 1
assert mock_file.call_count == 1
@ -1070,13 +1077,13 @@ async def test_usb_discovery_migration_driver_ready_timeout(
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "instruct_unplug"
assert entry.state is config_entries.ConfigEntryState.NOT_LOADED
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "start_addon"
assert set_addon_options.call_args == call(
"core_zwave_js", AddonsOptions(config={"device": USB_DISCOVERY_INFO.device})
@ -1086,9 +1093,13 @@ async def test_usb_discovery_migration_driver_ready_timeout(
assert restart_addon.call_args == call("core_zwave_js")
with patch(
("homeassistant.components.zwave_js.config_flow.DRIVER_READY_TIMEOUT"),
new=0,
):
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "restore_nvm"
assert client.connect.call_count == 2
@ -3656,6 +3667,20 @@ async def test_reconfigure_migrate_low_sdk_version(
]
],
)
@pytest.mark.parametrize(
(
"reset_server_version_side_effect",
"reset_unique_id",
"restore_server_version_side_effect",
"final_unique_id",
),
[
(None, "4321", None, "8765"),
(aiohttp.ClientError("Boom"), "1234", None, "8765"),
(None, "4321", aiohttp.ClientError("Boom"), "5678"),
(aiohttp.ClientError("Boom"), "1234", aiohttp.ClientError("Boom"), "5678"),
],
)
async def test_reconfigure_migrate_with_addon(
hass: HomeAssistant,
client,
@ -3665,8 +3690,16 @@ async def test_reconfigure_migrate_with_addon(
restart_addon,
set_addon_options,
get_addon_discovery_info,
get_server_version: AsyncMock,
reset_server_version_side_effect: Exception | None,
reset_unique_id: str,
restore_server_version_side_effect: Exception | None,
final_unique_id: str,
) -> None:
"""Test migration flow with add-on."""
get_server_version.side_effect = reset_server_version_side_effect
version_info = get_server_version.return_value
version_info.home_id = 4321
entry = integration
assert client.connect.call_count == 1
hass.config_entries.async_update_entry(
@ -3690,6 +3723,13 @@ async def test_reconfigure_migrate_with_addon(
side_effect=mock_backup_nvm_raw
)
async def mock_reset_controller():
client.driver.emit(
"driver ready", {"event": "driver ready", "source": "driver"}
)
client.driver.async_hard_reset = AsyncMock(side_effect=mock_reset_controller)
async def mock_restore_nvm(data: bytes):
client.driver.controller.emit(
"nvm convert progress",
@ -3712,22 +3752,22 @@ async def test_reconfigure_migrate_with_addon(
result = await entry.start_reconfigure_flow(hass)
assert result["type"] == FlowResultType.MENU
assert result["type"] is FlowResultType.MENU
assert result["step_id"] == "reconfigure"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"next_step_id": "intent_migrate"}
)
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "intent_migrate"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "backup_nvm"
with patch("pathlib.Path.write_bytes", MagicMock()) as mock_file:
with patch("pathlib.Path.write_bytes") as mock_file:
await hass.async_block_till_done()
assert client.driver.controller.async_backup_nvm_raw.call_count == 1
assert mock_file.call_count == 1
@ -3737,16 +3777,21 @@ async def test_reconfigure_migrate_with_addon(
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "instruct_unplug"
assert entry.state is config_entries.ConfigEntryState.NOT_LOADED
assert entry.unique_id == reset_unique_id
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "choose_serial_port"
assert result["data_schema"].schema[CONF_USB_PATH]
# Reset side effect before starting the add-on.
get_server_version.side_effect = None
version_info.home_id = 5678
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
@ -3754,7 +3799,7 @@ async def test_reconfigure_migrate_with_addon(
},
)
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "start_addon"
assert set_addon_options.call_args == call(
"core_zwave_js", AddonsOptions(config={"device": "/test"})
@ -3766,7 +3811,11 @@ async def test_reconfigure_migrate_with_addon(
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert entry.unique_id == "5678"
get_server_version.side_effect = restore_server_version_side_effect
version_info.home_id = 8765
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "restore_nvm"
assert client.connect.call_count == 2
@ -3782,9 +3831,10 @@ async def test_reconfigure_migrate_with_addon(
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "migration_successful"
assert integration.data["url"] == "ws://host1:3001"
assert integration.data["usb_path"] == "/test"
assert integration.data["use_addon"] is True
assert entry.data["url"] == "ws://host1:3001"
assert entry.data["usb_path"] == "/test"
assert entry.data["use_addon"] is True
assert entry.unique_id == final_unique_id
@pytest.mark.parametrize(
@ -3800,13 +3850,174 @@ async def test_reconfigure_migrate_with_addon(
]
],
)
async def test_reconfigure_migrate_driver_ready_timeout(
async def test_reconfigure_migrate_reset_driver_ready_timeout(
hass: HomeAssistant,
client,
supervisor,
integration,
addon_running,
restart_addon,
set_addon_options,
get_addon_discovery_info,
get_server_version: AsyncMock,
) -> None:
"""Test migration flow with driver ready timeout after controller reset."""
version_info = get_server_version.return_value
version_info.home_id = 4321
entry = integration
assert client.connect.call_count == 1
hass.config_entries.async_update_entry(
entry,
unique_id="1234",
data={
"url": "ws://localhost:3000",
"use_addon": True,
"usb_path": "/dev/ttyUSB0",
},
)
async def mock_backup_nvm_raw():
await asyncio.sleep(0)
client.driver.controller.emit(
"nvm backup progress", {"bytesRead": 100, "total": 200}
)
return b"test_nvm_data"
client.driver.controller.async_backup_nvm_raw = AsyncMock(
side_effect=mock_backup_nvm_raw
)
async def mock_reset_controller():
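# Intentionally do not emit "driver ready" here, so the flow hits the (patched) driver ready timeout after the controller reset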
await asyncio.sleep(0)
client.driver.async_hard_reset = AsyncMock(side_effect=mock_reset_controller)
async def mock_restore_nvm(data: bytes):
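# Emit restore progress events and "driver ready" so the restore step can complete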
client.driver.controller.emit(
"nvm convert progress",
{"event": "nvm convert progress", "bytesRead": 100, "total": 200},
)
await asyncio.sleep(0)
client.driver.controller.emit(
"nvm restore progress",
{"event": "nvm restore progress", "bytesWritten": 100, "total": 200},
)
client.driver.emit(
"driver ready", {"event": "driver ready", "source": "driver"}
)
client.driver.controller.async_restore_nvm = AsyncMock(side_effect=mock_restore_nvm)
events = async_capture_events(
hass, data_entry_flow.EVENT_DATA_ENTRY_FLOW_PROGRESS_UPDATE
)
result = await entry.start_reconfigure_flow(hass)
assert result["type"] is FlowResultType.MENU
assert result["step_id"] == "reconfigure"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"next_step_id": "intent_migrate"}
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "intent_migrate"
with (
patch(
("homeassistant.components.zwave_js.config_flow.DRIVER_READY_TIMEOUT"),
new=0,
),
patch("pathlib.Path.write_bytes") as mock_file,
):
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "backup_nvm"
await hass.async_block_till_done()
assert client.driver.controller.async_backup_nvm_raw.call_count == 1
assert mock_file.call_count == 1
assert len(events) == 1
assert events[0].data["progress"] == 0.5
events.clear()
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "instruct_unplug"
assert entry.state is config_entries.ConfigEntryState.NOT_LOADED
assert entry.unique_id == "4321"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "choose_serial_port"
assert result["data_schema"].schema[CONF_USB_PATH]
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_USB_PATH: "/test",
},
)
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "start_addon"
assert set_addon_options.call_args == call(
"core_zwave_js", AddonsOptions(config={"device": "/test"})
)
await hass.async_block_till_done()
assert restart_addon.call_args == call("core_zwave_js")
version_info.home_id = 5678
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "restore_nvm"
assert client.connect.call_count == 2
await hass.async_block_till_done()
assert client.connect.call_count == 3
assert entry.state is config_entries.ConfigEntryState.LOADED
assert client.driver.controller.async_restore_nvm.call_count == 1
assert len(events) == 2
assert events[0].data["progress"] == 0.25
assert events[1].data["progress"] == 0.75
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "migration_successful"
assert entry.data["url"] == "ws://host1:3001"
assert entry.data["usb_path"] == "/test"
assert entry.data["use_addon"] is True
assert entry.unique_id == "5678"
@pytest.mark.parametrize(
"discovery_info",
[
[
Discovery(
addon="core_zwave_js",
service="zwave_js",
uuid=uuid4(),
config=ADDON_DISCOVERY_INFO,
)
]
],
)
async def test_reconfigure_migrate_restore_driver_ready_timeout(
hass: HomeAssistant,
client,
supervisor,
integration,
addon_running,
driver_ready_timeout: None,
restart_addon,
set_addon_options,
get_addon_discovery_info,
@ -3835,6 +4046,13 @@ async def test_reconfigure_migrate_driver_ready_timeout(
side_effect=mock_backup_nvm_raw
)
async def mock_reset_controller():
client.driver.emit(
"driver ready", {"event": "driver ready", "source": "driver"}
)
client.driver.async_hard_reset = AsyncMock(side_effect=mock_reset_controller)
async def mock_restore_nvm(data: bytes):
client.driver.controller.emit(
"nvm convert progress",
@ -3854,22 +4072,22 @@ async def test_reconfigure_migrate_driver_ready_timeout(
result = await entry.start_reconfigure_flow(hass)
assert result["type"] == FlowResultType.MENU
assert result["type"] is FlowResultType.MENU
assert result["step_id"] == "reconfigure"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"next_step_id": "intent_migrate"}
)
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "intent_migrate"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "backup_nvm"
with patch("pathlib.Path.write_bytes", MagicMock()) as mock_file:
with patch("pathlib.Path.write_bytes") as mock_file:
await hass.async_block_till_done()
assert client.driver.controller.async_backup_nvm_raw.call_count == 1
assert mock_file.call_count == 1
@ -3879,13 +4097,13 @@ async def test_reconfigure_migrate_driver_ready_timeout(
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "instruct_unplug"
assert entry.state is config_entries.ConfigEntryState.NOT_LOADED
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "choose_serial_port"
assert result["data_schema"].schema[CONF_USB_PATH]
@ -3896,7 +4114,7 @@ async def test_reconfigure_migrate_driver_ready_timeout(
},
)
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "start_addon"
assert set_addon_options.call_args == call(
"core_zwave_js", AddonsOptions(config={"device": "/test"})
@ -3906,9 +4124,13 @@ async def test_reconfigure_migrate_driver_ready_timeout(
assert restart_addon.call_args == call("core_zwave_js")
with patch(
("homeassistant.components.zwave_js.config_flow.DRIVER_READY_TIMEOUT"),
new=0,
):
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "restore_nvm"
assert client.connect.call_count == 2
@ -3944,19 +4166,19 @@ async def test_reconfigure_migrate_backup_failure(
result = await entry.start_reconfigure_flow(hass)
assert result["type"] == FlowResultType.MENU
assert result["type"] is FlowResultType.MENU
assert result["step_id"] == "reconfigure"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"next_step_id": "intent_migrate"}
)
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "intent_migrate"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.ABORT
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "backup_failed"
@ -3979,30 +4201,28 @@ async def test_reconfigure_migrate_backup_file_failure(
result = await entry.start_reconfigure_flow(hass)
assert result["type"] == FlowResultType.MENU
assert result["type"] is FlowResultType.MENU
assert result["step_id"] == "reconfigure"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"next_step_id": "intent_migrate"}
)
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "intent_migrate"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "backup_nvm"
with patch(
"pathlib.Path.write_bytes", MagicMock(side_effect=OSError("test_error"))
):
with patch("pathlib.Path.write_bytes", side_effect=OSError("test_error")):
await hass.async_block_till_done()
assert client.driver.controller.async_backup_nvm_raw.call_count == 1
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.ABORT
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "backup_failed"
@ -4041,41 +4261,45 @@ async def test_reconfigure_migrate_start_addon_failure(
client.driver.controller.async_backup_nvm_raw = AsyncMock(
side_effect=mock_backup_nvm_raw
)
client.driver.controller.async_restore_nvm = AsyncMock(
side_effect=FailedCommand("test_error", "unknown_error")
async def mock_reset_controller():
client.driver.emit(
"driver ready", {"event": "driver ready", "source": "driver"}
)
client.driver.async_hard_reset = AsyncMock(side_effect=mock_reset_controller)
result = await entry.start_reconfigure_flow(hass)
assert result["type"] == FlowResultType.MENU
assert result["type"] is FlowResultType.MENU
assert result["step_id"] == "reconfigure"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"next_step_id": "intent_migrate"}
)
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "intent_migrate"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "backup_nvm"
with patch("pathlib.Path.write_bytes", MagicMock()) as mock_file:
with patch("pathlib.Path.write_bytes") as mock_file:
await hass.async_block_till_done()
assert client.driver.controller.async_backup_nvm_raw.call_count == 1
assert mock_file.call_count == 1
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "instruct_unplug"
assert entry.state is config_entries.ConfigEntryState.NOT_LOADED
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "choose_serial_port"
result = await hass.config_entries.flow.async_configure(
@ -4090,13 +4314,13 @@ async def test_reconfigure_migrate_start_addon_failure(
"core_zwave_js", AddonsOptions(config={"device": "/test"})
)
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "start_addon"
await hass.async_block_till_done()
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.ABORT
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "addon_start_failed"
@ -4136,41 +4360,48 @@ async def test_reconfigure_migrate_restore_failure(
client.driver.controller.async_backup_nvm_raw = AsyncMock(
side_effect=mock_backup_nvm_raw
)
async def mock_reset_controller():
client.driver.emit(
"driver ready", {"event": "driver ready", "source": "driver"}
)
client.driver.async_hard_reset = AsyncMock(side_effect=mock_reset_controller)
client.driver.controller.async_restore_nvm = AsyncMock(
side_effect=FailedCommand("test_error", "unknown_error")
)
result = await entry.start_reconfigure_flow(hass)
assert result["type"] == FlowResultType.MENU
assert result["type"] is FlowResultType.MENU
assert result["step_id"] == "reconfigure"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"next_step_id": "intent_migrate"}
)
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "intent_migrate"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "backup_nvm"
with patch("pathlib.Path.write_bytes", MagicMock()) as mock_file:
with patch("pathlib.Path.write_bytes") as mock_file:
await hass.async_block_till_done()
assert client.driver.controller.async_backup_nvm_raw.call_count == 1
assert mock_file.call_count == 1
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "instruct_unplug"
assert entry.state is config_entries.ConfigEntryState.NOT_LOADED
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "choose_serial_port"
result = await hass.config_entries.flow.async_configure(
@ -4180,13 +4411,13 @@ async def test_reconfigure_migrate_restore_failure(
},
)
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "start_addon"
await hass.async_block_till_done()
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "restore_nvm"
await hass.async_block_till_done()
@ -4195,13 +4426,13 @@ async def test_reconfigure_migrate_restore_failure(
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "restore_failed"
assert result["description_placeholders"]["file_path"]
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "restore_nvm"
await hass.async_block_till_done()
@ -4210,7 +4441,7 @@ async def test_reconfigure_migrate_restore_failure(
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "restore_failed"
hass.config_entries.flow.async_abort(result["flow_id"])
@ -4218,28 +4449,76 @@ async def test_reconfigure_migrate_restore_failure(
assert len(hass.config_entries.flow.async_progress()) == 0
async def test_get_driver_failure(hass: HomeAssistant, integration, client) -> None:
"""Test get driver failure."""
async def test_get_driver_failure_intent_migrate(
hass: HomeAssistant,
integration: MockConfigEntry,
) -> None:
"""Test get driver failure in intent migrate step."""
entry = integration
hass.config_entries.async_update_entry(
integration, unique_id="1234", data={**integration.data, "use_addon": True}
)
result = await entry.start_reconfigure_flow(hass)
assert result["type"] == FlowResultType.MENU
assert result["type"] is FlowResultType.MENU
assert result["step_id"] == "reconfigure"
await hass.config_entries.async_unload(integration.entry_id)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"next_step_id": "intent_migrate"}
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "config_entry_not_loaded"
async def test_get_driver_failure_instruct_unplug(
hass: HomeAssistant,
client: MagicMock,
integration: MockConfigEntry,
) -> None:
"""Test get driver failure in instruct unplug step."""
async def mock_backup_nvm_raw():
await asyncio.sleep(0)
client.driver.controller.emit(
"nvm backup progress", {"bytesRead": 100, "total": 200}
)
return b"test_nvm_data"
client.driver.controller.async_backup_nvm_raw = AsyncMock(
side_effect=mock_backup_nvm_raw
)
entry = integration
hass.config_entries.async_update_entry(
integration, unique_id="1234", data={**integration.data, "use_addon": True}
)
result = await entry.start_reconfigure_flow(hass)
assert result["type"] is FlowResultType.MENU
assert result["step_id"] == "reconfigure"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"next_step_id": "intent_migrate"}
)
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "intent_migrate"
await hass.config_entries.async_unload(integration.entry_id)
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.ABORT
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "backup_nvm"
with patch("pathlib.Path.write_bytes") as mock_file:
await hass.async_block_till_done()
assert client.driver.controller.async_backup_nvm_raw.call_count == 1
assert mock_file.call_count == 1
await hass.config_entries.async_unload(integration.entry_id)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "config_entry_not_loaded"
@ -4263,29 +4542,29 @@ async def test_hard_reset_failure(hass: HomeAssistant, integration, client) -> N
result = await entry.start_reconfigure_flow(hass)
assert result["type"] == FlowResultType.MENU
assert result["type"] is FlowResultType.MENU
assert result["step_id"] == "reconfigure"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"next_step_id": "intent_migrate"}
)
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "intent_migrate"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "backup_nvm"
with patch("pathlib.Path.write_bytes", MagicMock()) as mock_file:
with patch("pathlib.Path.write_bytes") as mock_file:
await hass.async_block_till_done()
assert client.driver.controller.async_backup_nvm_raw.call_count == 1
assert mock_file.call_count == 1
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.ABORT
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "reset_failed"
@ -4306,31 +4585,38 @@ async def test_choose_serial_port_usb_ports_failure(
side_effect=mock_backup_nvm_raw
)
async def mock_reset_controller():
client.driver.emit(
"driver ready", {"event": "driver ready", "source": "driver"}
)
client.driver.async_hard_reset = AsyncMock(side_effect=mock_reset_controller)
result = await entry.start_reconfigure_flow(hass)
assert result["type"] == FlowResultType.MENU
assert result["type"] is FlowResultType.MENU
assert result["step_id"] == "reconfigure"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"next_step_id": "intent_migrate"}
)
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "intent_migrate"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.SHOW_PROGRESS
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert result["step_id"] == "backup_nvm"
with patch("pathlib.Path.write_bytes", MagicMock()) as mock_file:
with patch("pathlib.Path.write_bytes") as mock_file:
await hass.async_block_till_done()
assert client.driver.controller.async_backup_nvm_raw.call_count == 1
assert mock_file.call_count == 1
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "instruct_unplug"
assert entry.state is config_entries.ConfigEntryState.NOT_LOADED
@ -4339,7 +4625,7 @@ async def test_choose_serial_port_usb_ports_failure(
side_effect=OSError("test_error"),
):
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == FlowResultType.ABORT
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "usb_ports_failed"
@ -4350,14 +4636,14 @@ async def test_configure_addon_usb_ports_failure(
entry = integration
result = await entry.start_reconfigure_flow(hass)
assert result["type"] == FlowResultType.MENU
assert result["type"] is FlowResultType.MENU
assert result["step_id"] == "reconfigure"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"next_step_id": "intent_reconfigure"}
)
assert result["type"] == FlowResultType.FORM
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "on_supervisor_reconfigure"
with patch(
@ -4367,5 +4653,5 @@ async def test_configure_addon_usb_ports_failure(
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"use_addon": True}
)
assert result["type"] == FlowResultType.ABORT
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "usb_ports_failed"