Mirror of https://github.com/home-assistant/core.git (synced 2025-07-17 18:27:09 +00:00)

commit 3e6473d130: 2025.5.3 (#145516)
@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["pyaprilaire"],
-  "requirements": ["pyaprilaire==0.8.1"]
+  "requirements": ["pyaprilaire==0.9.0"]
 }

@@ -2,8 +2,8 @@

 from aiohttp import ClientTimeout
 from azure.core.exceptions import (
+    AzureError,
     ClientAuthenticationError,
-    HttpResponseError,
     ResourceNotFoundError,
 )
 from azure.core.pipeline.transport._aiohttp import (

@@ -70,7 +70,7 @@ async def async_setup_entry(
             translation_key="invalid_auth",
             translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
         ) from err
-    except HttpResponseError as err:
+    except AzureError as err:
         raise ConfigEntryNotReady(
             translation_domain=DOMAIN,
             translation_key="cannot_connect",
@@ -8,7 +8,7 @@ import json
 import logging
 from typing import Any, Concatenate

-from azure.core.exceptions import HttpResponseError
+from azure.core.exceptions import AzureError, HttpResponseError, ServiceRequestError
 from azure.storage.blob import BlobProperties

 from homeassistant.components.backup import (

@@ -80,6 +80,20 @@ def handle_backup_errors[_R, **P](
                 f"Error during backup operation in {func.__name__}:"
                 f" Status {err.status_code}, message: {err.message}"
             ) from err
+        except ServiceRequestError as err:
+            raise BackupAgentError(
+                f"Timeout during backup operation in {func.__name__}"
+            ) from err
+        except AzureError as err:
+            _LOGGER.debug(
+                "Error during backup in %s: %s",
+                func.__name__,
+                err,
+                exc_info=True,
+            )
+            raise BackupAgentError(
+                f"Error during backup operation in {func.__name__}: {err}"
+            ) from err

     return wrapper
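The ordering of the new except clauses matters: in azure-core, ServiceRequestError derives from AzureError, so the narrower handler has to come first or the broad catch-all would swallow it. A minimal standalone sketch of the same decorator pattern, with stand-in exception classes rather than the real azure-core and Home Assistant types:

```python
# Sketch of the error-mapping decorator pattern; all exception classes here
# are stand-ins, not the real azure-core / Home Assistant ones.
import asyncio
from collections.abc import Awaitable, Callable
from functools import wraps


class AzureError(Exception):
    """Stand-in for azure.core.exceptions.AzureError."""


class ServiceRequestError(AzureError):
    """Stand-in for azure.core.exceptions.ServiceRequestError."""


class BackupAgentError(Exception):
    """Stand-in for the Home Assistant backup agent error."""


def handle_backup_errors[**P, R](
    func: Callable[P, Awaitable[R]],
) -> Callable[P, Awaitable[R]]:
    """Wrap a coroutine and re-raise library errors as BackupAgentError."""

    @wraps(func)
    async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        try:
            return await func(*args, **kwargs)
        except ServiceRequestError as err:
            # Most specific first: request/timeout failures.
            raise BackupAgentError(
                f"Timeout during backup operation in {func.__name__}"
            ) from err
        except AzureError as err:
            # Broad catch-all last, so it cannot shadow the case above.
            raise BackupAgentError(
                f"Error during backup operation in {func.__name__}: {err}"
            ) from err

    return wrapper


@handle_backup_errors
async def download_backup() -> bytes:
    raise ServiceRequestError("connection timed out")


try:
    asyncio.run(download_backup())
except BackupAgentError as err:
    print(err)  # Timeout during backup operation in download_backup
```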
@@ -8,5 +8,5 @@
   "iot_class": "local_polling",
   "loggers": ["aiocomelit"],
   "quality_scale": "bronze",
-  "requirements": ["aiocomelit==0.12.1"]
+  "requirements": ["aiocomelit==0.12.3"]
 }

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ecovacs",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.10", "deebot-client==13.2.0"]
+  "requirements": ["py-sucks==0.9.10", "deebot-client==13.2.1"]
 }

@@ -78,7 +78,9 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
         capability_fn=lambda caps: caps.stats.clean,
         value_fn=lambda e: e.area,
         translation_key="stats_area",
+        device_class=SensorDeviceClass.AREA,
         native_unit_of_measurement_fn=get_area_native_unit_of_measurement,
+        suggested_unit_of_measurement=UnitOfArea.SQUARE_METERS,
     ),
     EcovacsSensorEntityDescription[StatsEvent](
         key="stats_time",

@@ -95,7 +97,8 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
         value_fn=lambda e: e.area,
         key="total_stats_area",
         translation_key="total_stats_area",
-        native_unit_of_measurement_fn=get_area_native_unit_of_measurement,
+        device_class=SensorDeviceClass.AREA,
+        native_unit_of_measurement=UnitOfArea.SQUARE_METERS,
         state_class=SensorStateClass.TOTAL_INCREASING,
     ),
     EcovacsSensorEntityDescription[TotalStatsEvent](

@@ -6,5 +6,5 @@
   "iot_class": "local_push",
   "loggers": ["sense_energy"],
   "quality_scale": "internal",
-  "requirements": ["sense-energy==0.13.7"]
+  "requirements": ["sense-energy==0.13.8"]
 }

@@ -17,7 +17,7 @@ DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS = False

 DEFAULT_PORT: Final = 6053

-STABLE_BLE_VERSION_STR = "2025.2.2"
+STABLE_BLE_VERSION_STR = "2025.5.0"
 STABLE_BLE_VERSION = AwesomeVersion(STABLE_BLE_VERSION_STR)
 PROJECT_URLS = {
     "esphome.bluetooth-proxy": "https://esphome.github.io/bluetooth-proxies/",

@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_push",
   "loggers": ["pyfibaro"],
-  "requirements": ["pyfibaro==0.8.2"]
+  "requirements": ["pyfibaro==0.8.3"]
 }

@@ -254,11 +254,11 @@ async def google_generative_ai_config_option_schema(
         )
         for api_model in sorted(api_models, key=lambda x: x.display_name or "")
         if (
-            api_model.name != "models/gemini-1.0-pro"  # duplicate of gemini-pro
-            and api_model.display_name
+            api_model.display_name
             and api_model.name
-            and api_model.supported_actions
+            and "tts" not in api_model.name
             and "vision" not in api_model.name
+            and api_model.supported_actions
             and "generateContent" in api_model.supported_actions
         )
     ]
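The rewritten filter above drops the gemini-1.0 special case and instead keeps any model that has a name and display name, is not a TTS or vision model, and supports generateContent. A condensed sketch of that predicate, assuming a plain dataclass in place of the google-genai model type:

```python
# Sketch of the model filter; ApiModel is a hypothetical stand-in for the
# google-genai API model object.
from dataclasses import dataclass, field


@dataclass
class ApiModel:
    name: str
    display_name: str
    supported_actions: list[str] = field(default_factory=list)


def is_selectable(api_model: ApiModel) -> bool:
    """Mirror the option-schema filter: text-generation models only."""
    return bool(
        api_model.display_name
        and api_model.name
        and "tts" not in api_model.name
        and "vision" not in api_model.name
        and api_model.supported_actions
        and "generateContent" in api_model.supported_actions
    )


models = [
    ApiModel("models/gemini-2.0-flash", "Gemini 2.0 Flash", ["generateContent"]),
    ApiModel("models/gemini-tts-preview", "Gemini TTS", ["generateContent"]),
]
selected = [m.display_name for m in models if is_selectable(m)]
assert selected == ["Gemini 2.0 Flash"]  # the TTS model is filtered out
```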
@@ -319,11 +319,10 @@ class GoogleGenerativeAIConversationEntity(
             tools.append(Tool(google_search=GoogleSearch()))

         model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
-        # Gemini 1.0 doesn't support system_instruction while 1.5 does.
-        # Assume future versions will support it (if not, the request fails with a
-        # clear message at which point we can fix).
+        # Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
         supports_system_instruction = (
-            "gemini-1.0" not in model_name and "gemini-pro" not in model_name
+            "gemma" not in model_name
+            and "gemini-2.0-flash-preview-image-generation" not in model_name
         )

         prompt_content = cast(

@@ -41,12 +41,12 @@
         },
         "data_description": {
           "prompt": "Instruct how the LLM should respond. This can be a template.",
-          "enable_google_search_tool": "Only works with \"No control\" in the \"Control Home Assistant\" setting. See docs for a workaround using it with \"Assist\"."
+          "enable_google_search_tool": "Only works if there is nothing selected in the \"Control Home Assistant\" setting. See docs for a workaround using it with \"Assist\"."
         }
       }
     },
     "error": {
-      "invalid_google_search_option": "Google Search cannot be enabled alongside any Assist capability, this can only be used when Assist is set to \"No control\"."
+      "invalid_google_search_option": "Google Search can only be enabled if nothing is selected in the \"Control Home Assistant\" setting."
     }
   },
   "services": {
@@ -60,6 +60,9 @@ class HistoryStats:
         self._start = start
         self._end = end

+        self._pending_events: list[Event[EventStateChangedData]] = []
+        self._query_count = 0
+
     async def async_update(
         self, event: Event[EventStateChangedData] | None
     ) -> HistoryStatsState:

@@ -85,6 +88,14 @@ class HistoryStats:
         utc_now = dt_util.utcnow()
         now_timestamp = floored_timestamp(utc_now)

+        # If we end up querying data from the recorder when we get triggered by a new state
+        # change event, it is possible this function could be reentered a second time before
+        # the first recorder query returns. In that case a second recorder query will be done
+        # and we need to hold the new event so that we can append it after the second query.
+        # Otherwise the event will be dropped.
+        if event:
+            self._pending_events.append(event)
+
         if current_period_start_timestamp > now_timestamp:
             # History cannot tell the future
             self._history_current_period = []

@@ -113,15 +124,14 @@ class HistoryStats:
         start_changed = (
             current_period_start_timestamp != previous_period_start_timestamp
         )
+        end_changed = current_period_end_timestamp != previous_period_end_timestamp
         if start_changed:
             self._prune_history_cache(current_period_start_timestamp)

         new_data = False
         if event and (new_state := event.data["new_state"]) is not None:
-            if (
-                current_period_start_timestamp
-                <= floored_timestamp(new_state.last_changed)
-                <= current_period_end_timestamp
-            ):
+            if current_period_start_timestamp <= floored_timestamp(
+                new_state.last_changed
+            ):
                 self._history_current_period.append(
                     HistoryState(new_state.state, new_state.last_changed_timestamp)

@@ -131,26 +141,31 @@ class HistoryStats:
             not new_data
             and current_period_end_timestamp < now_timestamp
             and not start_changed
+            and not end_changed
         ):
             # If period has not changed and current time after the period end...
             # Don't compute anything as the value cannot have changed
             return self._state
         else:
             await self._async_history_from_db(
-                current_period_start_timestamp, current_period_end_timestamp
+                current_period_start_timestamp, now_timestamp
             )
-            if event and (new_state := event.data["new_state"]) is not None:
-                if (
-                    current_period_start_timestamp
-                    <= floored_timestamp(new_state.last_changed)
-                    <= current_period_end_timestamp
-                ):
-                    self._history_current_period.append(
-                        HistoryState(new_state.state, new_state.last_changed_timestamp)
-                    )
+            for pending_event in self._pending_events:
+                if (new_state := pending_event.data["new_state"]) is not None:
+                    if current_period_start_timestamp <= floored_timestamp(
+                        new_state.last_changed
+                    ):
+                        self._history_current_period.append(
+                            HistoryState(
+                                new_state.state, new_state.last_changed_timestamp
+                            )
+                        )

         self._has_recorder_data = True

+        if self._query_count == 0:
+            self._pending_events.clear()
+
         seconds_matched, match_count = self._async_compute_seconds_and_changes(
             now_timestamp,
             current_period_start_timestamp,

@@ -165,12 +180,16 @@ class HistoryStats:
         current_period_end_timestamp: float,
     ) -> None:
         """Update history data for the current period from the database."""
-        instance = get_instance(self.hass)
-        states = await instance.async_add_executor_job(
-            self._state_changes_during_period,
-            current_period_start_timestamp,
-            current_period_end_timestamp,
-        )
+        self._query_count += 1
+        try:
+            instance = get_instance(self.hass)
+            states = await instance.async_add_executor_job(
+                self._state_changes_during_period,
+                current_period_start_timestamp,
+                current_period_end_timestamp,
+            )
+        finally:
+            self._query_count -= 1
         self._history_current_period = [
             HistoryState(state.state, state.last_changed.timestamp())
             for state in states

@@ -208,6 +227,9 @@ class HistoryStats:
             current_state_matches = history_state.state in self._entity_states
             state_change_timestamp = history_state.last_changed

+            if math.floor(state_change_timestamp) > end_timestamp:
+                break
+
             if math.floor(state_change_timestamp) > now_timestamp:
                 # Shouldn't count states that are in the future
                 _LOGGER.debug(

@@ -215,7 +237,7 @@ class HistoryStats:
                     state_change_timestamp,
                     now_timestamp,
                 )
-                continue
+                break

             if previous_state_matches:
                 elapsed += state_change_timestamp - last_state_change_timestamp
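The pending-events list and query counter added above guard against async_update re-entering while a recorder query is awaited; without them, a state-change event arriving mid-query could be dropped. A stripped-down sketch of that guard, assuming toy stand-ins for the recorder query and the event objects:

```python
# Toy model of the reentrancy guard: events that arrive while a slow query
# is in flight are parked, replayed after the query, and cleared only once
# no query is still running.
import asyncio


class Tracker:
    def __init__(self) -> None:
        self._pending_events: list[str] = []
        self._query_count = 0

    async def _query_db(self) -> list[str]:
        await asyncio.sleep(0.1)  # stands in for the slow recorder query
        return ["db-row"]

    async def update(self, event: str | None) -> list[str]:
        if event:
            self._pending_events.append(event)
        self._query_count += 1
        try:
            rows = await self._query_db()
        finally:
            self._query_count -= 1
        # Replay every event that arrived while the query was in flight.
        rows += self._pending_events
        # Clear only when no other update() is still mid-query; otherwise a
        # concurrent caller would lose events it has not replayed yet.
        if self._query_count == 0:
            self._pending_events.clear()
        return rows


async def main() -> None:
    tracker = Tracker()
    first, second = await asyncio.gather(tracker.update("a"), tracker.update("b"))
    print(first, second)  # both results include both parked events


asyncio.run(main())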
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/holiday",
   "iot_class": "local_polling",
-  "requirements": ["holidays==0.72", "babel==2.15.0"]
+  "requirements": ["holidays==0.73", "babel==2.15.0"]
 }

@@ -5,7 +5,7 @@ from dataclasses import dataclass
 from typing import cast

 from pylamarzocco import LaMarzoccoMachine
-from pylamarzocco.const import BackFlushStatus, MachineState, WidgetType
+from pylamarzocco.const import BackFlushStatus, MachineState, ModelName, WidgetType
 from pylamarzocco.models import BackFlush, MachineStatus

 from homeassistant.components.binary_sensor import (

@@ -66,6 +66,9 @@ ENTITIES: tuple[LaMarzoccoBinarySensorEntityDescription, ...] = (
             is BackFlushStatus.REQUESTED
         ),
         entity_category=EntityCategory.DIAGNOSTIC,
+        supported_fn=lambda coordinator: (
+            coordinator.device.dashboard.model_name != ModelName.GS3_MP
+        ),
     ),
     LaMarzoccoBinarySensorEntityDescription(
         key="websocket_connected",

@@ -37,5 +37,5 @@
   "iot_class": "cloud_push",
   "loggers": ["pylamarzocco"],
   "quality_scale": "platinum",
-  "requirements": ["pylamarzocco==2.0.3"]
+  "requirements": ["pylamarzocco==2.0.4"]
 }

@@ -4,7 +4,7 @@ import asyncio
 from dataclasses import dataclass
 from typing import Any

-from pylamarzocco.const import FirmwareType, UpdateCommandStatus
+from pylamarzocco.const import FirmwareType, UpdateStatus
 from pylamarzocco.exceptions import RequestNotSuccessful

 from homeassistant.components.update import (

@@ -125,7 +125,7 @@ class LaMarzoccoUpdateEntity(LaMarzoccoEntity, UpdateEntity):
         await self.coordinator.device.update_firmware()
         while (
             update_progress := await self.coordinator.device.get_firmware()
-        ).command_status is UpdateCommandStatus.IN_PROGRESS:
+        ).command_status is UpdateStatus.IN_PROGRESS:
             if counter >= MAX_UPDATE_WAIT:
                 _raise_timeout_error()
             self._attr_update_percentage = update_progress.progress_percentage

@@ -9,7 +9,11 @@
     "condition_type": {
       "is_locked": "{entity_name} is locked",
       "is_unlocked": "{entity_name} is unlocked",
-      "is_open": "{entity_name} is open"
+      "is_open": "{entity_name} is open",
+      "is_jammed": "{entity_name} is jammed",
+      "is_locking": "{entity_name} is locking",
+      "is_unlocking": "{entity_name} is unlocking",
+      "is_opening": "{entity_name} is opening"
     },
     "trigger_type": {
       "locked": "{entity_name} locked",

@@ -475,7 +475,7 @@ class MatrixBot:
         file_stat = await aiofiles.os.stat(image_path)

         _LOGGER.debug("Uploading file from path, %s", image_path)
-        async with aiofiles.open(image_path, "r+b") as image_file:
+        async with aiofiles.open(image_path, "rb") as image_file:
             response, _ = await self._client.upload(
                 image_file,
                 content_type=mime_type,

@@ -8,6 +8,6 @@
   "iot_class": "calculated",
   "loggers": ["yt_dlp"],
   "quality_scale": "internal",
-  "requirements": ["yt-dlp[default]==2025.03.31"],
+  "requirements": ["yt-dlp[default]==2025.05.22"],
   "single_config_entry": true
 }

@@ -248,19 +248,22 @@ class NetatmoThermostat(NetatmoRoomEntity, ClimateEntity):
         if self.home.entity_id != data["home_id"]:
            return

-        if data["event_type"] == EVENT_TYPE_SCHEDULE and "schedule_id" in data:
-            self._selected_schedule = getattr(
-                self.hass.data[DOMAIN][DATA_SCHEDULES][self.home.entity_id].get(
-                    data["schedule_id"]
-                ),
-                "name",
-                None,
-            )
-            self._attr_extra_state_attributes[ATTR_SELECTED_SCHEDULE] = (
-                self._selected_schedule
-            )
-            self.async_write_ha_state()
-            self.data_handler.async_force_update(self._signal_name)
+        if data["event_type"] == EVENT_TYPE_SCHEDULE:
+            # handle schedule change
+            if "schedule_id" in data:
+                self._selected_schedule = getattr(
+                    self.hass.data[DOMAIN][DATA_SCHEDULES][self.home.entity_id].get(
+                        data["schedule_id"]
+                    ),
+                    "name",
+                    None,
+                )
+                self._attr_extra_state_attributes[ATTR_SELECTED_SCHEDULE] = (
+                    self._selected_schedule
+                )
+                self.async_write_ha_state()
+                self.data_handler.async_force_update(self._signal_name)
+            # ignore other schedule events
             return

         home = data["home"]

@@ -7,5 +7,5 @@
   "iot_class": "cloud_push",
   "loggers": ["aionfty"],
   "quality_scale": "bronze",
-  "requirements": ["aiontfy==0.5.1"]
+  "requirements": ["aiontfy==0.5.2"]
 }

@@ -1,6 +1,7 @@
 {
   "domain": "onedrive",
   "name": "OneDrive",
+  "after_dependencies": ["cloud"],
   "codeowners": ["@zweckj"],
   "config_flow": true,
   "dependencies": ["application_credentials"],

@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/opower",
   "iot_class": "cloud_polling",
   "loggers": ["opower"],
-  "requirements": ["opower==0.12.0"]
+  "requirements": ["opower==0.12.1"]
 }

@@ -19,9 +19,7 @@ from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon
     MultiprotocolAddonManager,
     get_multiprotocol_addon_manager,
     is_multiprotocol_url,
-    multi_pan_addon_using_device,
 )
-from homeassistant.components.homeassistant_yellow import RADIO_DEVICE as YELLOW_RADIO
 from homeassistant.config_entries import SOURCE_USER
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError

@@ -34,10 +32,6 @@ if TYPE_CHECKING:

 _LOGGER = logging.getLogger(__name__)

-INFO_URL_SKY_CONNECT = (
-    "https://skyconnect.home-assistant.io/multiprotocol-channel-missmatch"
-)
-INFO_URL_YELLOW = "https://yellow.home-assistant.io/multiprotocol-channel-missmatch"
-
 INSECURE_NETWORK_KEYS = (
     # Thread web UI default

@@ -208,16 +202,12 @@ async def _warn_on_channel_collision(
         delete_issue()
         return

-    yellow = await multi_pan_addon_using_device(hass, YELLOW_RADIO)
-    learn_more_url = INFO_URL_YELLOW if yellow else INFO_URL_SKY_CONNECT
-
     ir.async_create_issue(
         hass,
         DOMAIN,
         f"otbr_zha_channel_collision_{otbrdata.entry_id}",
         is_fixable=False,
         is_persistent=False,
-        learn_more_url=learn_more_url,
         severity=ir.IssueSeverity.WARNING,
         translation_key="otbr_zha_channel_collision",
         translation_placeholders={
@@ -6,6 +6,7 @@ from contextlib import contextmanager, nullcontext
 from datetime import timedelta
 import logging
 from typing import Any
+import warnings

 from qnapstats import QNAPStats
 import urllib3

@@ -37,7 +38,8 @@ def suppress_insecure_request_warning():
     Was added in here to solve the following issue, not being solved upstream.
     https://github.com/colinodell/python-qnapstats/issues/96
     """
-    with urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning):
+    with warnings.catch_warnings():
+        warnings.simplefilter("ignore", urllib3.exceptions.InsecureRequestWarning)
         yield
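The replaced line was broken in a subtle way: urllib3.disable_warnings() mutes the warning globally and returns None, so it cannot serve as a context manager, while warnings.catch_warnings() saves and restores the filter state around the block. A self-contained sketch of the corrected pattern, with a stand-in warning class instead of the urllib3 one:

```python
# Scoped warning suppression: the filter change is undone when the block
# exits. InsecureRequestWarning here is a stand-in class.
import warnings
from contextlib import contextmanager


class InsecureRequestWarning(Warning):
    """Stand-in for urllib3.exceptions.InsecureRequestWarning."""


@contextmanager
def suppress_insecure_request_warning():
    """Temporarily ignore the warning, restoring filter state on exit."""
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", InsecureRequestWarning)
        yield


with suppress_insecure_request_warning():
    warnings.warn("self-signed cert", InsecureRequestWarning)  # silenced

warnings.warn("outside the block", InsecureRequestWarning)  # visible again
```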
@@ -364,90 +364,88 @@ def migrate_entity_ids(
     devices = dr.async_entries_for_config_entry(device_reg, config_entry_id)
     ch_device_ids = {}
     for device in devices:
-        for dev_id in device.identifiers:
-            (device_uid, ch, is_chime) = get_device_uid_and_ch(dev_id, host)
-            if not device_uid:
-                continue
+        (device_uid, ch, is_chime) = get_device_uid_and_ch(device, host)

         if host.api.supported(None, "UID") and device_uid[0] != host.unique_id:
             if ch is None:
                 new_device_id = f"{host.unique_id}"
             else:
                 new_device_id = f"{host.unique_id}_{device_uid[1]}"
             _LOGGER.debug(
                 "Updating Reolink device UID from %s to %s",
                 device_uid,
                 new_device_id,
             )
             new_identifiers = {(DOMAIN, new_device_id)}
-            device_reg.async_update_device(
-                device.id, new_identifiers=new_identifiers
-            )
+            device_reg.async_update_device(device.id, new_identifiers=new_identifiers)

-            if ch is None or is_chime:
-                continue  # Do not consider the NVR itself or chimes
-
-            # Check for wrongfully combined host with NVR entities in one device
-            # Can be removed in HA 2025.12
-            if (DOMAIN, host.unique_id) in device.identifiers:
-                new_identifiers = device.identifiers.copy()
-                for old_id in device.identifiers:
-                    if old_id[0] == DOMAIN and old_id[1] != host.unique_id:
-                        new_identifiers.remove(old_id)
-                _LOGGER.debug(
-                    "Updating Reolink device identifiers from %s to %s",
-                    device.identifiers,
-                    new_identifiers,
-                )
-                device_reg.async_update_device(
-                    device.id, new_identifiers=new_identifiers
-                )
-                break
-
-            # Check for wrongfully added MAC of the NVR/Hub to the camera
-            # Can be removed in HA 2025.12
-            host_connnection = (CONNECTION_NETWORK_MAC, host.api.mac_address)
-            if host_connnection in device.connections:
-                new_connections = device.connections.copy()
-                new_connections.remove(host_connnection)
-                _LOGGER.debug(
-                    "Updating Reolink device connections from %s to %s",
-                    device.connections,
-                    new_connections,
-                )
-                device_reg.async_update_device(
-                    device.id, new_connections=new_connections
-                )
-
-            ch_device_ids[device.id] = ch
-            if host.api.supported(ch, "UID") and device_uid[1] != host.api.camera_uid(
-                ch
-            ):
-                if host.api.supported(None, "UID"):
-                    new_device_id = f"{host.unique_id}_{host.api.camera_uid(ch)}"
-                else:
-                    new_device_id = f"{device_uid[0]}_{host.api.camera_uid(ch)}"
-                _LOGGER.debug(
-                    "Updating Reolink device UID from %s to %s",
-                    device_uid,
-                    new_device_id,
-                )
-                new_identifiers = {(DOMAIN, new_device_id)}
-                existing_device = device_reg.async_get_device(
-                    identifiers=new_identifiers
-                )
-                if existing_device is None:
-                    device_reg.async_update_device(
-                        device.id, new_identifiers=new_identifiers
-                    )
-                else:
-                    _LOGGER.warning(
-                        "Reolink device with uid %s already exists, "
-                        "removing device with uid %s",
-                        new_device_id,
-                        device_uid,
-                    )
-                    device_reg.async_remove_device(device.id)
+        # Check for wrongfully combined entities in one device
+        # Can be removed in HA 2025.12
+        new_identifiers = device.identifiers.copy()
+        remove_ids = False
+        if (DOMAIN, host.unique_id) in device.identifiers:
+            remove_ids = True  # NVR/Hub in identifiers, keep that one, remove others
+        for old_id in device.identifiers:
+            (old_device_uid, old_ch, old_is_chime) = get_device_uid_and_ch(old_id, host)
+            if (
+                not old_device_uid
+                or old_device_uid[0] != host.unique_id
+                or old_id[1] == host.unique_id
+            ):
+                continue
+            if remove_ids:
+                new_identifiers.remove(old_id)
+            remove_ids = True  # after the first identifier, remove the others
+        if new_identifiers != device.identifiers:
+            _LOGGER.debug(
+                "Updating Reolink device identifiers from %s to %s",
+                device.identifiers,
+                new_identifiers,
+            )
+            device_reg.async_update_device(device.id, new_identifiers=new_identifiers)
+            break
+
+        if ch is None or is_chime:
+            continue  # Do not consider the NVR itself or chimes
+
+        # Check for wrongfully added MAC of the NVR/Hub to the camera
+        # Can be removed in HA 2025.12
+        host_connnection = (CONNECTION_NETWORK_MAC, host.api.mac_address)
+        if host_connnection in device.connections:
+            new_connections = device.connections.copy()
+            new_connections.remove(host_connnection)
+            _LOGGER.debug(
+                "Updating Reolink device connections from %s to %s",
+                device.connections,
+                new_connections,
+            )
+            device_reg.async_update_device(device.id, new_connections=new_connections)
+
+        ch_device_ids[device.id] = ch
+        if host.api.supported(ch, "UID") and device_uid[1] != host.api.camera_uid(ch):
+            if host.api.supported(None, "UID"):
+                new_device_id = f"{host.unique_id}_{host.api.camera_uid(ch)}"
+            else:
+                new_device_id = f"{device_uid[0]}_{host.api.camera_uid(ch)}"
+            _LOGGER.debug(
+                "Updating Reolink device UID from %s to %s",
+                device_uid,
+                new_device_id,
+            )
+            new_identifiers = {(DOMAIN, new_device_id)}
+            existing_device = device_reg.async_get_device(identifiers=new_identifiers)
+            if existing_device is None:
+                device_reg.async_update_device(
+                    device.id, new_identifiers=new_identifiers
+                )
+            else:
+                _LOGGER.warning(
+                    "Reolink device with uid %s already exists, "
+                    "removing device with uid %s",
+                    new_device_id,
+                    device_uid,
+                )
+                device_reg.async_remove_device(device.id)

     entity_reg = er.async_get(hass)
     entities = er.async_entries_for_config_entry(entity_reg, config_entry_id)
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/sense",
   "iot_class": "cloud_polling",
   "loggers": ["sense_energy"],
-  "requirements": ["sense-energy==0.13.7"]
+  "requirements": ["sense-energy==0.13.8"]
 }

@@ -31,7 +31,7 @@ from .entity import SmartThingsEntity

 ATTR_OPERATION_STATE = "operation_state"
 MODE_TO_STATE = {
-    "auto": HVACMode.AUTO,
+    "auto": HVACMode.HEAT_COOL,
     "cool": HVACMode.COOL,
     "eco": HVACMode.AUTO,
     "rush hour": HVACMode.AUTO,

@@ -40,7 +40,7 @@ MODE_TO_STATE = {
     "off": HVACMode.OFF,
 }
 STATE_TO_MODE = {
-    HVACMode.AUTO: "auto",
+    HVACMode.HEAT_COOL: "auto",
     HVACMode.COOL: "cool",
     HVACMode.HEAT: "heat",
     HVACMode.OFF: "off",

@@ -30,5 +30,5 @@
   "iot_class": "cloud_push",
   "loggers": ["pysmartthings"],
   "quality_scale": "bronze",
-  "requirements": ["pysmartthings==3.2.2"]
+  "requirements": ["pysmartthings==3.2.3"]
 }

@@ -151,6 +151,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry) -
         player_coordinator = SqueezeBoxPlayerUpdateCoordinator(
             hass, entry, player, lms.uuid
         )
+        await player_coordinator.async_refresh()
         known_players.append(player.player_id)
         async_dispatcher_send(
             hass, SIGNAL_PLAYER_DISCOVERED, player_coordinator

@@ -12,5 +12,5 @@
   "documentation": "https://www.home-assistant.io/integrations/squeezebox",
   "iot_class": "local_polling",
   "loggers": ["pysqueezebox"],
-  "requirements": ["pysqueezebox==0.12.0"]
+  "requirements": ["pysqueezebox==0.12.1"]
 }

@@ -6,7 +6,7 @@ from collections.abc import Callable
 from datetime import datetime
 import json
 import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, cast

 from pysqueezebox import Server, async_discover
 import voluptuous as vol

@@ -329,22 +329,22 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
     @property
     def media_title(self) -> str | None:
         """Title of current playing media."""
-        return str(self._player.title)
+        return cast(str | None, self._player.title)

     @property
     def media_channel(self) -> str | None:
         """Channel (e.g. webradio name) of current playing media."""
-        return str(self._player.remote_title)
+        return cast(str | None, self._player.remote_title)

     @property
     def media_artist(self) -> str | None:
         """Artist of current playing media."""
-        return str(self._player.artist)
+        return cast(str | None, self._player.artist)

     @property
     def media_album_name(self) -> str | None:
         """Album of current playing media."""
-        return str(self._player.album)
+        return cast(str | None, self._player.album)

     @property
     def repeat(self) -> RepeatMode:
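The switch from str() to cast() above is not cosmetic: the str | None return annotations anticipate that the player library reports None when nothing is playing, and str(None) produces the literal string "None", which then shows up as bogus media metadata. cast() has no runtime effect and lets the None through:

```python
# Why cast() instead of str(): None must survive as None.
from typing import cast

title: str | None = None  # e.g. nothing is playing

assert str(title) == "None"             # old behavior: bogus "None" title
assert cast(str | None, title) is None  # new behavior: None is preserved
```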
@@ -236,7 +236,7 @@ class SynologyDSMBackupAgent(BackupAgent):
             raise BackupAgentError("Failed to read meta data") from err

         try:
-            files = await self._file_station.get_files(path=self.path)
+            files = await self._file_station.get_files(path=self.path, limit=1000)
         except SynologyDSMAPIErrorException as err:
             raise BackupAgentError("Failed to list backups") from err

@@ -7,7 +7,7 @@
   "documentation": "https://www.home-assistant.io/integrations/synology_dsm",
   "iot_class": "local_polling",
   "loggers": ["synology_dsm"],
-  "requirements": ["py-synologydsm-api==2.7.1"],
+  "requirements": ["py-synologydsm-api==2.7.2"],
   "ssdp": [
     {
       "manufacturer": "Synology",

@@ -14,7 +14,7 @@
     "velbus-protocol"
   ],
   "quality_scale": "bronze",
-  "requirements": ["velbus-aio==2025.4.2"],
+  "requirements": ["velbus-aio==2025.5.0"],
   "usb": [
     {
       "vid": "10CF",
@@ -2,6 +2,7 @@

 from __future__ import annotations

+import asyncio
 from datetime import timedelta
 from typing import Any

@@ -17,7 +18,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from . import WebControlProConfigEntry
 from .entity import WebControlProGenericEntity

-SCAN_INTERVAL = timedelta(seconds=5)
+ACTION_DELAY = 0.5
+SCAN_INTERVAL = timedelta(seconds=10)
 PARALLEL_UPDATES = 1

@@ -56,6 +58,7 @@ class WebControlProCover(WebControlProGenericEntity, CoverEntity):
         """Move the cover to a specific position."""
         action = self._dest.action(self._drive_action_desc)
         await action(percentage=100 - kwargs[ATTR_POSITION])
+        await asyncio.sleep(ACTION_DELAY)

     @property
     def is_closed(self) -> bool | None:

@@ -66,11 +69,13 @@ class WebControlProCover(WebControlProGenericEntity, CoverEntity):
         """Open the cover."""
         action = self._dest.action(self._drive_action_desc)
         await action(percentage=0)
+        await asyncio.sleep(ACTION_DELAY)

     async def async_close_cover(self, **kwargs: Any) -> None:
         """Close the cover."""
         action = self._dest.action(self._drive_action_desc)
         await action(percentage=100)
+        await asyncio.sleep(ACTION_DELAY)

     async def async_stop_cover(self, **kwargs: Any) -> None:
         """Stop the device if in motion."""

@@ -79,6 +84,7 @@ class WebControlProCover(WebControlProGenericEntity, CoverEntity):
             WMS_WebControl_pro_API_actionType.Stop,
         )
         await action()
+        await asyncio.sleep(ACTION_DELAY)


 class WebControlProAwning(WebControlProCover):

@@ -2,6 +2,7 @@

 from __future__ import annotations

+import asyncio
 from datetime import timedelta
 from typing import Any

@@ -16,7 +17,8 @@ from . import WebControlProConfigEntry
 from .const import BRIGHTNESS_SCALE
 from .entity import WebControlProGenericEntity

-SCAN_INTERVAL = timedelta(seconds=5)
+ACTION_DELAY = 0.5
+SCAN_INTERVAL = timedelta(seconds=15)
 PARALLEL_UPDATES = 1

@@ -54,11 +56,13 @@ class WebControlProLight(WebControlProGenericEntity, LightEntity):
         """Turn the light on."""
         action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch)
         await action(onOffState=True)
+        await asyncio.sleep(ACTION_DELAY)

     async def async_turn_off(self, **kwargs: Any) -> None:
         """Turn the light off."""
         action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch)
         await action(onOffState=False)
+        await asyncio.sleep(ACTION_DELAY)


 class WebControlProDimmer(WebControlProLight):

@@ -87,3 +91,4 @@ class WebControlProDimmer(WebControlProLight):
         await action(
             percentage=brightness_to_value(BRIGHTNESS_SCALE, kwargs[ATTR_BRIGHTNESS])
         )
+        await asyncio.sleep(ACTION_DELAY)
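The wmspro changes above all follow one pattern: every hub command is now followed by a fixed ACTION_DELAY pause, and the poll interval is relaxed, presumably so the next poll reads back the settled state. A distilled sketch of the pattern; fake_action is a hypothetical stand-in for the hub action callable:

```python
# Post-action settle delay: each command is followed by a fixed pause
# before control returns (and before the next poll can run).
import asyncio

ACTION_DELAY = 0.5  # seconds; same constant the diff introduces


async def send_action(action, **params) -> None:
    """Run a hub command, then give the device time to settle."""
    await action(**params)
    await asyncio.sleep(ACTION_DELAY)


async def main() -> None:
    async def fake_action(**params) -> None:  # hypothetical stand-in
        print("sent", params)

    await send_action(fake_action, percentage=0)  # e.g. fully open a cover


asyncio.run(main())
```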
@@ -7,5 +7,5 @@
   "iot_class": "local_polling",
   "loggers": ["holidays"],
   "quality_scale": "internal",
-  "requirements": ["holidays==0.72"]
+  "requirements": ["holidays==0.73"]
 }

@@ -105,6 +105,7 @@ from .const import (
     CONF_USE_ADDON,
     DATA_CLIENT,
     DOMAIN,
+    DRIVER_READY_TIMEOUT,
     EVENT_DEVICE_ADDED_TO_REGISTRY,
     EVENT_VALUE_UPDATED,
     LIB_LOGGER,

@@ -135,7 +136,6 @@ from .services import ZWaveServices

 CONNECT_TIMEOUT = 10
 DATA_DRIVER_EVENTS = "driver_events"
-DRIVER_READY_TIMEOUT = 60

 CONFIG_SCHEMA = vol.Schema(
     {
@@ -88,9 +88,9 @@ from .const import (
     CONF_INSTALLER_MODE,
     DATA_CLIENT,
     DOMAIN,
+    DRIVER_READY_TIMEOUT,
     EVENT_DEVICE_ADDED_TO_REGISTRY,
     LOGGER,
-    RESTORE_NVM_DRIVER_READY_TIMEOUT,
     USER_AGENT,
 )
 from .helpers import (

@@ -189,8 +189,6 @@ STRATEGY = "strategy"
 # https://github.com/zwave-js/node-zwave-js/blob/master/packages/core/src/security/QR.ts#L41
 MINIMUM_QR_STRING_LENGTH = 52

-HARD_RESET_CONTROLLER_DRIVER_READY_TIMEOUT = 60
-
 # Helper schemas
 PLANNED_PROVISIONING_ENTRY_SCHEMA = vol.All(

@@ -2858,7 +2856,7 @@ async def websocket_hard_reset_controller(
     await driver.async_hard_reset()

     with suppress(TimeoutError):
-        async with asyncio.timeout(HARD_RESET_CONTROLLER_DRIVER_READY_TIMEOUT):
+        async with asyncio.timeout(DRIVER_READY_TIMEOUT):
             await wait_driver_ready.wait()

     # When resetting the controller, the controller home id is also changed.

@@ -3105,8 +3103,29 @@ async def websocket_restore_nvm(
     await controller.async_restore_nvm_base64(msg["data"])

     with suppress(TimeoutError):
-        async with asyncio.timeout(RESTORE_NVM_DRIVER_READY_TIMEOUT):
+        async with asyncio.timeout(DRIVER_READY_TIMEOUT):
             await wait_driver_ready.wait()

+    # When restoring the NVM to the controller, the controller home id is also changed.
+    # The controller state in the client is stale after restoring the NVM,
+    # so get the new home id with a new client using the helper function.
+    # The client state will be refreshed by reloading the config entry,
+    # after the unique id of the config entry has been updated.
+    try:
+        version_info = await async_get_version_info(hass, entry.data[CONF_URL])
+    except CannotConnect:
+        # Just log this error, as there's nothing to do about it here.
+        # The stale unique id needs to be handled by a repair flow,
+        # after the config entry has been reloaded.
+        LOGGER.error(
+            "Failed to get server version, cannot update config entry"
+            "unique id with new home id, after controller NVM restore"
+        )
+    else:
+        hass.config_entries.async_update_entry(
+            entry, unique_id=str(version_info.home_id)
+        )
+
     await hass.config_entries.async_reload(entry.entry_id)

     connection.send_message(
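Restoring NVM changes the controller's home id, so the block added above refreshes the config entry's unique id from a fresh version handshake before the entry is reloaded, and treats a failed fetch as non-fatal (a repair flow has to pick it up later). A condensed sketch of that bookkeeping; get_version_info and ConfigStore are hypothetical stand-ins for the Home Assistant helpers:

```python
# Refresh a stored unique id from a fresh server handshake; failure to
# connect only logs, it does not abort the surrounding flow.
import asyncio
import logging

LOGGER = logging.getLogger(__name__)


class CannotConnect(Exception):
    """Stand-in for the helper's connection error."""


class ConfigStore:
    """Stand-in for the config entry being updated."""

    def __init__(self) -> None:
        self.unique_id: str | None = None


async def get_version_info(url: str) -> dict:
    """Hypothetical stand-in for the zwave-js-server version handshake."""
    return {"home_id": 123456789}


async def refresh_unique_id(store: ConfigStore, url: str) -> None:
    try:
        version_info = await get_version_info(url)
    except CannotConnect:
        # Non-fatal: a later repair flow has to fix the stale unique id.
        LOGGER.error("Failed to get server version; unique id left stale")
    else:
        store.unique_id = str(version_info["home_id"])


store = ConfigStore()
asyncio.run(refresh_unique_id(store, "ws://localhost:3000"))
assert store.unique_id == "123456789"
```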
@ -65,7 +65,7 @@ from .const import (
|
|||||||
CONF_USE_ADDON,
|
CONF_USE_ADDON,
|
||||||
DATA_CLIENT,
|
DATA_CLIENT,
|
||||||
DOMAIN,
|
DOMAIN,
|
||||||
RESTORE_NVM_DRIVER_READY_TIMEOUT,
|
DRIVER_READY_TIMEOUT,
|
||||||
)
|
)
|
||||||
from .helpers import CannotConnect, async_get_version_info
|
from .helpers import CannotConnect, async_get_version_info
|
||||||
|
|
||||||
@ -776,17 +776,14 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||||||
)
|
)
|
||||||
|
|
||||||
@callback
|
@callback
|
||||||
def _async_update_entry(
|
def _async_update_entry(self, updates: dict[str, Any]) -> None:
|
||||||
self, updates: dict[str, Any], *, schedule_reload: bool = True
|
|
||||||
) -> None:
|
|
||||||
"""Update the config entry with new data."""
|
"""Update the config entry with new data."""
|
||||||
config_entry = self._reconfigure_config_entry
|
config_entry = self._reconfigure_config_entry
|
||||||
assert config_entry is not None
|
assert config_entry is not None
|
||||||
self.hass.config_entries.async_update_entry(
|
self.hass.config_entries.async_update_entry(
|
||||||
config_entry, data=config_entry.data | updates
|
config_entry, data=config_entry.data | updates
|
||||||
)
|
)
|
||||||
if schedule_reload:
|
self.hass.config_entries.async_schedule_reload(config_entry.entry_id)
|
||||||
self.hass.config_entries.async_schedule_reload(config_entry.entry_id)
|
|
||||||
|
|
||||||
async def async_step_intent_reconfigure(
|
async def async_step_intent_reconfigure(
|
||||||
self, user_input: dict[str, Any] | None = None
|
self, user_input: dict[str, Any] | None = None
|
||||||
@ -896,15 +893,63 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||||||
# Now that the old controller is gone, we can scan for serial ports again
|
# Now that the old controller is gone, we can scan for serial ports again
|
||||||
return await self.async_step_choose_serial_port()
|
return await self.async_step_choose_serial_port()
|
||||||
|
|
||||||
|
try:
|
||||||
|
driver = self._get_driver()
|
||||||
|
except AbortFlow:
|
||||||
|
return self.async_abort(reason="config_entry_not_loaded")
|
||||||
|
|
||||||
|
@callback
|
||||||
|
def set_driver_ready(event: dict) -> None:
|
||||||
|
"Set the driver ready event."
|
||||||
|
wait_driver_ready.set()
|
||||||
|
|
||||||
|
wait_driver_ready = asyncio.Event()
|
||||||
|
|
||||||
|
unsubscribe = driver.once("driver ready", set_driver_ready)
|
||||||
|
|
||||||
# reset the old controller
|
# reset the old controller
|
||||||
try:
|
try:
|
||||||
await self._get_driver().async_hard_reset()
|
await driver.async_hard_reset()
|
||||||
except (AbortFlow, FailedCommand) as err:
|
except FailedCommand as err:
|
||||||
|
unsubscribe()
|
||||||
_LOGGER.error("Failed to reset controller: %s", err)
|
_LOGGER.error("Failed to reset controller: %s", err)
|
||||||
return self.async_abort(reason="reset_failed")
|
return self.async_abort(reason="reset_failed")
|
||||||
|
|
||||||
|
# Update the unique id of the config entry
|
||||||
|
# to the new home id, which requires waiting for the driver
|
||||||
|
# to be ready before getting the new home id.
|
||||||
|
# If the backup restore, done later in the flow, fails,
|
||||||
|
# the config entry unique id should be the new home id
|
||||||
|
# after the controller reset.
|
||||||
|
try:
|
||||||
|
async with asyncio.timeout(DRIVER_READY_TIMEOUT):
|
||||||
|
await wait_driver_ready.wait()
|
||||||
|
except TimeoutError:
|
||||||
|
pass
|
||||||
|
finally:
|
||||||
|
unsubscribe()
|
||||||
|
|
||||||
config_entry = self._reconfigure_config_entry
|
config_entry = self._reconfigure_config_entry
|
||||||
assert config_entry is not None
|
assert config_entry is not None
|
||||||
|
|
||||||
|
try:
|
||||||
|
version_info = await async_get_version_info(
|
||||||
|
self.hass, config_entry.data[CONF_URL]
|
||||||
|
)
|
||||||
|
except CannotConnect:
|
||||||
|
# Just log this error, as there's nothing to do about it here.
|
||||||
|
# The stale unique id needs to be handled by a repair flow,
|
||||||
|
# after the config entry has been reloaded, if the backup restore
|
||||||
|
# also fails.
|
||||||
|
_LOGGER.debug(
|
||||||
|
"Failed to get server version, cannot update config entry "
|
||||||
|
"unique id with new home id, after controller reset"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self.hass.config_entries.async_update_entry(
|
||||||
|
config_entry, unique_id=str(version_info.home_id)
|
||||||
|
)
|
||||||
|
|
||||||
# Unload the config entry before asking the user to unplug the controller.
|
# Unload the config entry before asking the user to unplug the controller.
|
||||||
await self.hass.config_entries.async_unload(config_entry.entry_id)
|
await self.hass.config_entries.async_unload(config_entry.entry_id)
|
||||||
|
|
||||||
@ -1154,14 +1199,17 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||||||
assert ws_address is not None
|
assert ws_address is not None
|
||||||
version_info = self.version_info
|
version_info = self.version_info
|
||||||
assert version_info is not None
|
assert version_info is not None
|
||||||
|
config_entry = self._reconfigure_config_entry
|
||||||
|
assert config_entry is not None
|
||||||
|
|
||||||
# We need to wait for the config entry to be reloaded,
|
# We need to wait for the config entry to be reloaded,
|
||||||
# before restoring the backup.
|
# before restoring the backup.
|
||||||
# We will do this in the restore nvm progress task,
|
# We will do this in the restore nvm progress task,
|
||||||
# to get a nicer user experience.
|
# to get a nicer user experience.
|
||||||
self._async_update_entry(
|
self.hass.config_entries.async_update_entry(
|
||||||
{
|
config_entry,
|
||||||
"unique_id": str(version_info.home_id),
|
data={
|
||||||
|
**config_entry.data,
|
||||||
CONF_URL: ws_address,
|
CONF_URL: ws_address,
|
||||||
CONF_USB_PATH: self.usb_path,
|
CONF_USB_PATH: self.usb_path,
|
||||||
CONF_S0_LEGACY_KEY: self.s0_legacy_key,
|
CONF_S0_LEGACY_KEY: self.s0_legacy_key,
|
||||||
@ -1173,8 +1221,9 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||||||
CONF_USE_ADDON: True,
|
CONF_USE_ADDON: True,
|
||||||
CONF_INTEGRATION_CREATED_ADDON: self.integration_created_addon,
|
CONF_INTEGRATION_CREATED_ADDON: self.integration_created_addon,
|
||||||
},
|
},
|
||||||
schedule_reload=False,
|
unique_id=str(version_info.home_id),
|
||||||
)
|
)
|
||||||
|
|
||||||
return await self.async_step_restore_nvm()
|
return await self.async_step_restore_nvm()
|
||||||
|
|
||||||
async def async_step_finish_addon_setup_reconfigure(
|
async def async_step_finish_addon_setup_reconfigure(
|
||||||
@@ -1321,8 +1370,24 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
             raise AbortFlow(f"Failed to restore network: {err}") from err
         else:
             with suppress(TimeoutError):
-                async with asyncio.timeout(RESTORE_NVM_DRIVER_READY_TIMEOUT):
+                async with asyncio.timeout(DRIVER_READY_TIMEOUT):
                     await wait_driver_ready.wait()
+            try:
+                version_info = await async_get_version_info(
+                    self.hass, config_entry.data[CONF_URL]
+                )
+            except CannotConnect:
+                # Just log this error, as there's nothing to do about it here.
+                # The stale unique id needs to be handled by a repair flow,
+                # after the config entry has been reloaded.
+                _LOGGER.error(
+                    "Failed to get server version, cannot update config entry "
+                    "unique id with new home id, after controller reset"
+                )
+            else:
+                self.hass.config_entries.async_update_entry(
+                    config_entry, unique_id=str(version_info.home_id)
+                )
             await self.hass.config_entries.async_reload(config_entry.entry_id)
         finally:
             for unsub in unsubs:
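After a controller reset or NVM restore the Z-Wave home ID changes, so the config entry's unique id has to follow it; when the server is unreachable the stale id is deliberately left for a later repair flow. A condensed sketch of that fetch-then-update pattern, reusing the names from the hunk above (not the integration's exact code):

    async def _sync_unique_id(hass, config_entry) -> None:
        try:
            version_info = await async_get_version_info(
                hass, config_entry.data[CONF_URL]
            )
        except CannotConnect:
            # Nothing to do here; a repair flow must fix the stale id later.
            _LOGGER.debug("Could not fetch version info; unique id left stale")
        else:
            hass.config_entries.async_update_entry(
                config_entry, unique_id=str(version_info.home_id)
            )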
@@ -204,4 +204,4 @@ COVER_TILT_PROPERTY_KEYS: set[str | int | None] = {

 # Other constants

-RESTORE_NVM_DRIVER_READY_TIMEOUT = 60
+DRIVER_READY_TIMEOUT = 60
@@ -25,7 +25,7 @@ if TYPE_CHECKING:
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2025
 MINOR_VERSION: Final = 5
-PATCH_VERSION: Final = "2"
+PATCH_VERSION: Final = "3"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "homeassistant"
-version = "2025.5.2"
+version = "2025.5.3"
 license = "Apache-2.0"
 license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
 description = "Open-source home automation platform running on Python 3."
requirements_all.txt (generated; 28 lines changed)
@@ -214,7 +214,7 @@ aiobafi6==0.9.0
 aiobotocore==2.21.1

 # homeassistant.components.comelit
-aiocomelit==0.12.1
+aiocomelit==0.12.3

 # homeassistant.components.dhcp
 aiodhcpwatcher==1.1.1
@@ -319,7 +319,7 @@ aionanoleaf==0.2.1
 aionotion==2024.03.0

 # homeassistant.components.ntfy
-aiontfy==0.5.1
+aiontfy==0.5.2

 # homeassistant.components.nut
 aionut==4.3.4
@@ -762,7 +762,7 @@ debugpy==1.8.13
 # decora==0.6

 # homeassistant.components.ecovacs
-deebot-client==13.2.0
+deebot-client==13.2.1

 # homeassistant.components.ihc
 # homeassistant.components.namecheapdns
@@ -1158,7 +1158,7 @@ hole==0.8.0

 # homeassistant.components.holiday
 # homeassistant.components.workday
-holidays==0.72
+holidays==0.73

 # homeassistant.components.frontend
 home-assistant-frontend==20250516.0
@@ -1614,7 +1614,7 @@ openwrt-luci-rpc==1.1.17
 openwrt-ubus-rpc==0.0.2

 # homeassistant.components.opower
-opower==0.12.0
+opower==0.12.1

 # homeassistant.components.oralb
 oralb-ble==0.17.6
@@ -1771,7 +1771,7 @@ py-schluter==0.1.7
 py-sucks==0.9.10

 # homeassistant.components.synology_dsm
-py-synologydsm-api==2.7.1
+py-synologydsm-api==2.7.2

 # homeassistant.components.atome
 pyAtome==0.1.1
@@ -1829,7 +1829,7 @@ pyairnow==1.2.1
 pyairvisual==2023.08.1

 # homeassistant.components.aprilaire
-pyaprilaire==0.8.1
+pyaprilaire==0.9.0

 # homeassistant.components.asuswrt
 pyasuswrt==0.1.21
@@ -1973,7 +1973,7 @@ pyevilgenius==2.0.0
 pyezviz==0.2.1.2

 # homeassistant.components.fibaro
-pyfibaro==0.8.2
+pyfibaro==0.8.3

 # homeassistant.components.fido
 pyfido==2.1.2
@@ -2093,7 +2093,7 @@ pykwb==0.0.8
 pylacrosse==0.4

 # homeassistant.components.lamarzocco
-pylamarzocco==2.0.3
+pylamarzocco==2.0.4

 # homeassistant.components.lastfm
 pylast==5.1.0
@@ -2326,7 +2326,7 @@ pysma==0.7.5
 pysmappee==0.2.29

 # homeassistant.components.smartthings
-pysmartthings==3.2.2
+pysmartthings==3.2.3

 # homeassistant.components.smarty
 pysmarty2==0.10.2
@@ -2356,7 +2356,7 @@ pyspcwebgw==0.7.0
 pyspeex-noise==1.0.2

 # homeassistant.components.squeezebox
-pysqueezebox==0.12.0
+pysqueezebox==0.12.1

 # homeassistant.components.stiebel_eltron
 pystiebeleltron==0.1.0
@@ -2713,7 +2713,7 @@ sendgrid==6.8.2

 # homeassistant.components.emulated_kasa
 # homeassistant.components.sense
-sense-energy==0.13.7
+sense-energy==0.13.8

 # homeassistant.components.sensirion_ble
 sensirion-ble==0.1.1
@@ -3016,7 +3016,7 @@ vallox-websocket-api==5.3.0
 vehicle==2.2.2

 # homeassistant.components.velbus
-velbus-aio==2025.4.2
+velbus-aio==2025.5.0

 # homeassistant.components.venstar
 venstarcolortouch==0.19
@@ -3147,7 +3147,7 @@ youless-api==2.2.0
 youtubeaio==1.1.5

 # homeassistant.components.media_extractor
-yt-dlp[default]==2025.03.31
+yt-dlp[default]==2025.05.22

 # homeassistant.components.zabbix
 zabbix-utils==2.0.2
requirements_test_all.txt (generated; 28 lines changed)
@@ -202,7 +202,7 @@ aiobafi6==0.9.0
 aiobotocore==2.21.1

 # homeassistant.components.comelit
-aiocomelit==0.12.1
+aiocomelit==0.12.3

 # homeassistant.components.dhcp
 aiodhcpwatcher==1.1.1
@@ -301,7 +301,7 @@ aionanoleaf==0.2.1
 aionotion==2024.03.0

 # homeassistant.components.ntfy
-aiontfy==0.5.1
+aiontfy==0.5.2

 # homeassistant.components.nut
 aionut==4.3.4
@@ -653,7 +653,7 @@ dbus-fast==2.43.0
 debugpy==1.8.13

 # homeassistant.components.ecovacs
-deebot-client==13.2.0
+deebot-client==13.2.1

 # homeassistant.components.ihc
 # homeassistant.components.namecheapdns
@@ -988,7 +988,7 @@ hole==0.8.0

 # homeassistant.components.holiday
 # homeassistant.components.workday
-holidays==0.72
+holidays==0.73

 # homeassistant.components.frontend
 home-assistant-frontend==20250516.0
@@ -1351,7 +1351,7 @@ openhomedevice==2.2.0
 openwebifpy==4.3.1

 # homeassistant.components.opower
-opower==0.12.0
+opower==0.12.1

 # homeassistant.components.oralb
 oralb-ble==0.17.6
@@ -1470,7 +1470,7 @@ py-nightscout==1.2.2
 py-sucks==0.9.10

 # homeassistant.components.synology_dsm
-py-synologydsm-api==2.7.1
+py-synologydsm-api==2.7.2

 # homeassistant.components.hdmi_cec
 pyCEC==0.5.2
@@ -1510,7 +1510,7 @@ pyairnow==1.2.1
 pyairvisual==2023.08.1

 # homeassistant.components.aprilaire
-pyaprilaire==0.8.1
+pyaprilaire==0.9.0

 # homeassistant.components.asuswrt
 pyasuswrt==0.1.21
@@ -1612,7 +1612,7 @@ pyevilgenius==2.0.0
 pyezviz==0.2.1.2

 # homeassistant.components.fibaro
-pyfibaro==0.8.2
+pyfibaro==0.8.3

 # homeassistant.components.fido
 pyfido==2.1.2
@@ -1708,7 +1708,7 @@ pykrakenapi==0.1.8
 pykulersky==0.5.8

 # homeassistant.components.lamarzocco
-pylamarzocco==2.0.3
+pylamarzocco==2.0.4

 # homeassistant.components.lastfm
 pylast==5.1.0
@@ -1899,7 +1899,7 @@ pysma==0.7.5
 pysmappee==0.2.29

 # homeassistant.components.smartthings
-pysmartthings==3.2.2
+pysmartthings==3.2.3

 # homeassistant.components.smarty
 pysmarty2==0.10.2
@@ -1929,7 +1929,7 @@ pyspcwebgw==0.7.0
 pyspeex-noise==1.0.2

 # homeassistant.components.squeezebox
-pysqueezebox==0.12.0
+pysqueezebox==0.12.1

 # homeassistant.components.stiebel_eltron
 pystiebeleltron==0.1.0
@@ -2196,7 +2196,7 @@ securetar==2025.2.1

 # homeassistant.components.emulated_kasa
 # homeassistant.components.sense
-sense-energy==0.13.7
+sense-energy==0.13.8

 # homeassistant.components.sensirion_ble
 sensirion-ble==0.1.1
@@ -2439,7 +2439,7 @@ vallox-websocket-api==5.3.0
 vehicle==2.2.2

 # homeassistant.components.velbus
-velbus-aio==2025.4.2
+velbus-aio==2025.5.0

 # homeassistant.components.venstar
 venstarcolortouch==0.19
@@ -2549,7 +2549,7 @@ youless-api==2.2.0
 youtubeaio==1.1.5

 # homeassistant.components.media_extractor
-yt-dlp[default]==2025.03.31
+yt-dlp[default]==2025.05.22

 # homeassistant.components.zamg
 zamg==0.3.6
@@ -6,7 +6,7 @@ from collections.abc import AsyncGenerator
 from io import StringIO
 from unittest.mock import ANY, Mock, patch

-from azure.core.exceptions import HttpResponseError
+from azure.core.exceptions import AzureError, HttpResponseError, ServiceRequestError
 from azure.storage.blob import BlobProperties
 import pytest

@@ -276,14 +276,33 @@ async def test_agents_error_on_download_not_found(
     assert mock_client.download_blob.call_count == 0


+@pytest.mark.parametrize(
+    ("error", "message"),
+    [
+        (
+            HttpResponseError("http error"),
+            "Error during backup operation in async_delete_backup: Status None, message: http error",
+        ),
+        (
+            ServiceRequestError("timeout"),
+            "Timeout during backup operation in async_delete_backup",
+        ),
+        (
+            AzureError("generic error"),
+            "Error during backup operation in async_delete_backup: generic error",
+        ),
+    ],
+)
 async def test_error_during_delete(
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
     mock_client: MagicMock,
     mock_config_entry: MockConfigEntry,
+    error: Exception,
+    message: str,
 ) -> None:
     """Test the error wrapper."""
-    mock_client.delete_blob.side_effect = HttpResponseError("Failed to delete backup")
+    mock_client.delete_blob.side_effect = error

     client = await hass_ws_client(hass)

@@ -297,12 +316,7 @@ async def test_error_during_delete(

     assert response["success"]
     assert response["result"] == {
-        "agent_errors": {
-            f"{DOMAIN}.{mock_config_entry.entry_id}": (
-                "Error during backup operation in async_delete_backup: "
-                "Status None, message: Failed to delete backup"
-            )
-        }
+        "agent_errors": {f"{DOMAIN}.{mock_config_entry.entry_id}": message}
     }

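The three parametrized cases map onto distinct except clauses in the backup error wrapper. The ordering of those clauses matters because, in azure-core, both `HttpResponseError` and `ServiceRequestError` derive from `AzureError`, so the generic handler has to come last or it would swallow the specific cases. A quick standalone sketch of that hierarchy check (illustrative, not part of the change):

    from azure.core.exceptions import AzureError, HttpResponseError, ServiceRequestError

    def classify(err: Exception) -> str:
        # Re-raise and let except-clause order pick the most specific match.
        try:
            raise err
        except HttpResponseError:
            return "http"
        except ServiceRequestError:
            return "timeout"
        except AzureError:
            return "generic"

    assert classify(HttpResponseError("boom")) == "http"
    assert classify(ServiceRequestError("boom")) == "timeout"
    assert classify(AzureError("boom")) == "generic"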
@@ -84,7 +84,7 @@ async def test_climate_data_update(
     freezer: FrozenDateTimeFactory,
     mock_serial_bridge: AsyncMock,
     mock_serial_bridge_config_entry: MockConfigEntry,
-    val: list[Any, Any],
+    val: list[list[Any]],
     mode: HVACMode,
     temp: float,
 ) -> None:
@@ -91,7 +91,7 @@ async def test_humidifier_data_update(
     freezer: FrozenDateTimeFactory,
     mock_serial_bridge: AsyncMock,
     mock_serial_bridge_config_entry: MockConfigEntry,
-    val: list[Any, Any],
+    val: list[list[Any]],
     mode: str,
     humidity: float,
 ) -> None:
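The Comelit annotation fix is a pure typing correction: `list` is generic over a single element type, so `list[Any, Any]` is rejected by type checkers, while a parametrize value holding rows of mixed fields is spelled `list[list[Any]]`. For example (values are illustrative):

    from typing import Any

    # One outer list of rows, each row itself a list of mixed-type fields.
    val: list[list[Any]] = [["heat", 20.5], ["cool", 27.0]]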
@@ -172,8 +172,11 @@
   }),
   'name': None,
   'options': dict({
+    'sensor.private': dict({
+      'suggested_unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
+    }),
   }),
-  'original_device_class': None,
+  'original_device_class': <SensorDeviceClass.AREA: 'area'>,
   'original_icon': None,
   'original_name': 'Area cleaned',
   'platform': 'ecovacs',
@@ -181,21 +184,22 @@
   'supported_features': 0,
   'translation_key': 'stats_area',
   'unique_id': '8516fbb1-17f1-4194-0000000_stats_area',
-  'unit_of_measurement': <UnitOfArea.SQUARE_CENTIMETERS: 'cm²'>,
+  'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
 })
 # ---
 # name: test_sensors[5xu9h3][sensor.goat_g1_area_cleaned:state]
 StateSnapshot({
   'attributes': ReadOnlyDict({
+    'device_class': 'area',
     'friendly_name': 'Goat G1 Area cleaned',
-    'unit_of_measurement': <UnitOfArea.SQUARE_CENTIMETERS: 'cm²'>,
+    'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
   }),
   'context': <ANY>,
   'entity_id': 'sensor.goat_g1_area_cleaned',
   'last_changed': <ANY>,
   'last_reported': <ANY>,
   'last_updated': <ANY>,
-  'state': '10',
+  'state': '0.0010',
 })
 # ---
 # name: test_sensors[5xu9h3][sensor.goat_g1_battery:entity-registry]
@@ -515,7 +519,7 @@
   'name': None,
   'options': dict({
   }),
-  'original_device_class': None,
+  'original_device_class': <SensorDeviceClass.AREA: 'area'>,
   'original_icon': None,
   'original_name': 'Total area cleaned',
   'platform': 'ecovacs',
@@ -523,15 +527,16 @@
   'supported_features': 0,
   'translation_key': 'total_stats_area',
   'unique_id': '8516fbb1-17f1-4194-0000000_total_stats_area',
-  'unit_of_measurement': <UnitOfArea.SQUARE_CENTIMETERS: 'cm²'>,
+  'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
 })
 # ---
 # name: test_sensors[5xu9h3][sensor.goat_g1_total_area_cleaned:state]
 StateSnapshot({
   'attributes': ReadOnlyDict({
+    'device_class': 'area',
     'friendly_name': 'Goat G1 Total area cleaned',
     'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
-    'unit_of_measurement': <UnitOfArea.SQUARE_CENTIMETERS: 'cm²'>,
+    'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
   }),
   'context': <ANY>,
   'entity_id': 'sensor.goat_g1_total_area_cleaned',
@@ -762,8 +767,11 @@
   }),
   'name': None,
   'options': dict({
+    'sensor.private': dict({
+      'suggested_unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
+    }),
   }),
-  'original_device_class': None,
+  'original_device_class': <SensorDeviceClass.AREA: 'area'>,
   'original_icon': None,
   'original_name': 'Area cleaned',
   'platform': 'ecovacs',
@@ -777,6 +785,7 @@
 # name: test_sensors[qhe2o2][sensor.dusty_area_cleaned:state]
 StateSnapshot({
   'attributes': ReadOnlyDict({
+    'device_class': 'area',
     'friendly_name': 'Dusty Area cleaned',
     'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
   }),
@@ -1258,7 +1267,7 @@
   'name': None,
   'options': dict({
   }),
-  'original_device_class': None,
+  'original_device_class': <SensorDeviceClass.AREA: 'area'>,
   'original_icon': None,
   'original_name': 'Total area cleaned',
   'platform': 'ecovacs',
@@ -1272,6 +1281,7 @@
 # name: test_sensors[qhe2o2][sensor.dusty_total_area_cleaned:state]
 StateSnapshot({
   'attributes': ReadOnlyDict({
+    'device_class': 'area',
     'friendly_name': 'Dusty Total area cleaned',
     'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
     'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
@@ -1553,8 +1563,11 @@
   }),
   'name': None,
   'options': dict({
+    'sensor.private': dict({
+      'suggested_unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
+    }),
   }),
-  'original_device_class': None,
+  'original_device_class': <SensorDeviceClass.AREA: 'area'>,
   'original_icon': None,
   'original_name': 'Area cleaned',
   'platform': 'ecovacs',
@@ -1568,6 +1581,7 @@
 # name: test_sensors[yna5x1][sensor.ozmo_950_area_cleaned:state]
 StateSnapshot({
   'attributes': ReadOnlyDict({
+    'device_class': 'area',
     'friendly_name': 'Ozmo 950 Area cleaned',
     'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
   }),
@@ -1944,7 +1958,7 @@
   'name': None,
   'options': dict({
   }),
-  'original_device_class': None,
+  'original_device_class': <SensorDeviceClass.AREA: 'area'>,
   'original_icon': None,
   'original_name': 'Total area cleaned',
   'platform': 'ecovacs',
@@ -1958,6 +1972,7 @@
 # name: test_sensors[yna5x1][sensor.ozmo_950_total_area_cleaned:state]
 StateSnapshot({
   'attributes': ReadOnlyDict({
+    'device_class': 'area',
     'friendly_name': 'Ozmo 950 Total area cleaned',
     'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
     'unit_of_measurement': <UnitOfArea.SQUARE_METERS: 'm²'>,
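All of these snapshot updates follow from one change: the Ecovacs area sensors now declare `SensorDeviceClass.AREA` and suggest m² for display, so a native reading of 10 cm² renders as 0.0010 m² (1 m² = 10,000 cm², hence 10 / 10,000 = 0.0010). A minimal sketch of such an entity description; this is not the integration's actual code:

    from homeassistant.components.sensor import (
        SensorDeviceClass,
        SensorEntityDescription,
    )
    from homeassistant.const import UnitOfArea

    # Measures natively in cm² but asks the frontend to show m².
    AREA_CLEANED = SensorEntityDescription(
        key="stats_area",
        device_class=SensorDeviceClass.AREA,
        native_unit_of_measurement=UnitOfArea.SQUARE_CENTIMETERS,
        suggested_unit_of_measurement=UnitOfArea.SQUARE_METERS,
    )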
@@ -1017,6 +1017,18 @@ async def test_start_from_history_then_watch_state_changes_sliding(
             }
             for i, sensor_type in enumerate(["time", "ratio", "count"])
         ]
+        + [
+            {
+                "platform": "history_stats",
+                "entity_id": "binary_sensor.state",
+                "name": f"sensor_delayed{i}",
+                "state": "on",
+                "end": "{{ utcnow()-timedelta(minutes=5) }}",
+                "duration": {"minutes": 55},
+                "type": sensor_type,
+            }
+            for i, sensor_type in enumerate(["time", "ratio", "count"])
+        ]
     },
 )
 await hass.async_block_till_done()
|
|||||||
assert hass.states.get("sensor.sensor0").state == "0.0"
|
assert hass.states.get("sensor.sensor0").state == "0.0"
|
||||||
assert hass.states.get("sensor.sensor1").state == "0.0"
|
assert hass.states.get("sensor.sensor1").state == "0.0"
|
||||||
assert hass.states.get("sensor.sensor2").state == "0"
|
assert hass.states.get("sensor.sensor2").state == "0"
|
||||||
|
assert hass.states.get("sensor.sensor_delayed0").state == "0.0"
|
||||||
|
assert hass.states.get("sensor.sensor_delayed1").state == "0.0"
|
||||||
|
assert hass.states.get("sensor.sensor_delayed2").state == "0"
|
||||||
|
|
||||||
with freeze_time(time):
|
with freeze_time(time):
|
||||||
hass.states.async_set("binary_sensor.state", "on")
|
hass.states.async_set("binary_sensor.state", "on")
|
||||||
@@ -1038,6 +1053,10 @@ async def test_start_from_history_then_watch_state_changes_sliding(
     assert hass.states.get("sensor.sensor0").state == "0.0"
     assert hass.states.get("sensor.sensor1").state == "0.0"
     assert hass.states.get("sensor.sensor2").state == "1"
+    # Delayed sensor will not have registered the turn on yet
+    assert hass.states.get("sensor.sensor_delayed0").state == "0.0"
+    assert hass.states.get("sensor.sensor_delayed1").state == "0.0"
+    assert hass.states.get("sensor.sensor_delayed2").state == "0"

     # After sensor has been on for 15 minutes, check state
     time += timedelta(minutes=15)  # 00:15
@@ -1048,6 +1067,10 @@ async def test_start_from_history_then_watch_state_changes_sliding(
     assert hass.states.get("sensor.sensor0").state == "0.25"
     assert hass.states.get("sensor.sensor1").state == "25.0"
     assert hass.states.get("sensor.sensor2").state == "1"
+    # Delayed sensor will only have data from 00:00 - 00:10
+    assert hass.states.get("sensor.sensor_delayed0").state == "0.17"
+    assert hass.states.get("sensor.sensor_delayed1").state == "18.2"  # 10 / 55
+    assert hass.states.get("sensor.sensor_delayed2").state == "1"

     with freeze_time(time):
         hass.states.async_set("binary_sensor.state", "off")
@@ -1064,6 +1087,9 @@ async def test_start_from_history_then_watch_state_changes_sliding(
     assert hass.states.get("sensor.sensor0").state == "0.25"
     assert hass.states.get("sensor.sensor1").state == "25.0"
     assert hass.states.get("sensor.sensor2").state == "1"
+    assert hass.states.get("sensor.sensor_delayed0").state == "0.25"
+    assert hass.states.get("sensor.sensor_delayed1").state == "27.3"  # 15 / 55
+    assert hass.states.get("sensor.sensor_delayed2").state == "1"

     time += timedelta(minutes=20)  # 01:05

@@ -1075,6 +1101,9 @@ async def test_start_from_history_then_watch_state_changes_sliding(
     assert hass.states.get("sensor.sensor0").state == "0.17"
     assert hass.states.get("sensor.sensor1").state == "16.7"
     assert hass.states.get("sensor.sensor2").state == "1"
+    assert hass.states.get("sensor.sensor_delayed0").state == "0.17"
+    assert hass.states.get("sensor.sensor_delayed1").state == "18.2"  # 10 / 55
+    assert hass.states.get("sensor.sensor_delayed2").state == "1"

     time += timedelta(minutes=5)  # 01:10

@@ -1086,6 +1115,9 @@ async def test_start_from_history_then_watch_state_changes_sliding(
     assert hass.states.get("sensor.sensor0").state == "0.08"
     assert hass.states.get("sensor.sensor1").state == "8.3"
     assert hass.states.get("sensor.sensor2").state == "1"
+    assert hass.states.get("sensor.sensor_delayed0").state == "0.08"
+    assert hass.states.get("sensor.sensor_delayed1").state == "9.1"  # 5 / 55
+    assert hass.states.get("sensor.sensor_delayed2").state == "1"

     time += timedelta(minutes=10)  # 01:20

@@ -1096,6 +1128,9 @@ async def test_start_from_history_then_watch_state_changes_sliding(
     assert hass.states.get("sensor.sensor0").state == "0.0"
     assert hass.states.get("sensor.sensor1").state == "0.0"
     assert hass.states.get("sensor.sensor2").state == "0"
+    assert hass.states.get("sensor.sensor_delayed0").state == "0.0"
+    assert hass.states.get("sensor.sensor_delayed1").state == "0.0"
+    assert hass.states.get("sensor.sensor_delayed2").state == "0"


 async def test_does_not_work_into_the_future(
@@ -1629,7 +1664,7 @@ async def test_state_change_during_window_rollover(
                 "entity_id": "binary_sensor.state",
                 "name": "sensor1",
                 "state": "on",
-                "start": "{{ today_at() }}",
+                "start": "{{ today_at('12:00') if now().hour == 1 else today_at() }}",
                 "end": "{{ now() }}",
                 "type": "time",
             }
@@ -1644,7 +1679,7 @@ async def test_state_change_during_window_rollover(
     assert hass.states.get("sensor.sensor1").state == "11.0"

     # Advance 59 minutes, to record the last minute update just before midnight, just like a real system would do.
-    t2 = start_time + timedelta(minutes=59, microseconds=300)
+    t2 = start_time + timedelta(minutes=59, microseconds=300)  # 23:59
     with freeze_time(t2):
         async_fire_time_changed(hass, t2)
         await hass.async_block_till_done()
@@ -1653,7 +1688,7 @@ async def test_state_change_during_window_rollover(

     # One minute has passed and the time has now rolled over into a new day, resetting the recorder window.
     # The sensor will be ON since midnight.
-    t3 = t2 + timedelta(minutes=1)
+    t3 = t2 + timedelta(minutes=1)  # 00:01
     with freeze_time(t3):
         # The sensor turns off around this time, before the sensor does its normal polled update.
         hass.states.async_set("binary_sensor.state", "off")
@@ -1662,13 +1697,69 @@ async def test_state_change_during_window_rollover(
     assert hass.states.get("sensor.sensor1").state == "0.0"

     # More time passes, and the history stats does a polled update again. It should be 0 since the sensor has been off since midnight.
-    t4 = t3 + timedelta(minutes=10)
+    # Turn the sensor back on.
+    t4 = t3 + timedelta(minutes=10)  # 00:10
     with freeze_time(t4):
         async_fire_time_changed(hass, t4)
         await hass.async_block_till_done()
+        hass.states.async_set("binary_sensor.state", "on")
+        await hass.async_block_till_done()

     assert hass.states.get("sensor.sensor1").state == "0.0"

+    # Due to time change, start time has now moved into the future. Turn off the sensor.
+    t5 = t4 + timedelta(hours=1)  # 01:10
+    with freeze_time(t5):
+        hass.states.async_set("binary_sensor.state", "off")
+        await hass.async_block_till_done(wait_background_tasks=True)
+
+    assert hass.states.get("sensor.sensor1").state == STATE_UNKNOWN
+
+    # Start time has moved back to start of today. Turn the sensor on at the same time it is recomputed
+    # Should query the recorder this time due to start time moving backwards in time.
+    t6 = t5 + timedelta(hours=1)  # 02:10
+
+    def _fake_states_t6(*args, **kwargs):
+        return {
+            "binary_sensor.state": [
+                ha.State(
+                    "binary_sensor.state",
+                    "off",
+                    last_changed=t6.replace(hour=0, minute=0, second=0, microsecond=0),
+                ),
+                ha.State(
+                    "binary_sensor.state",
+                    "on",
+                    last_changed=t6.replace(hour=0, minute=10, second=0, microsecond=0),
+                ),
+                ha.State(
+                    "binary_sensor.state",
+                    "off",
+                    last_changed=t6.replace(hour=1, minute=10, second=0, microsecond=0),
+                ),
+            ]
+        }
+
+    with (
+        patch(
+            "homeassistant.components.recorder.history.state_changes_during_period",
+            _fake_states_t6,
+        ),
+        freeze_time(t6),
+    ):
+        hass.states.async_set("binary_sensor.state", "on")
+        await hass.async_block_till_done(wait_background_tasks=True)
+
+    assert hass.states.get("sensor.sensor1").state == "1.0"
+
+    # Another hour passes since the re-query. Total 'On' time should be 2 hours (00:10-1:10, 2:10-now (3:10))
+    t7 = t6 + timedelta(hours=1)  # 03:10
+    with freeze_time(t7):
+        async_fire_time_changed(hass, t7)
+        await hass.async_block_till_done()
+
+    assert hass.states.get("sensor.sensor1").state == "2.0"
+

 @pytest.mark.parametrize("time_zone", ["Europe/Berlin", "America/Chicago", "US/Hawaii"])
 async def test_end_time_with_microseconds_zeroed(
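The extended rollover test pins down a subtle rule: incremental updates are only sound while the templated window start moves forward in time; when it jumps backwards (here at 02:10, once the `today_at('12:00')` branch stops applying), the integration must re-query the recorder, which is what the patched `state_changes_during_period` serves. A toy sketch of that decision; the helper name is illustrative, not Home Assistant's:

    from datetime import datetime

    def needs_requery(prev_start: datetime, new_start: datetime) -> bool:
        # Incremental tracking only works while the window start advances.
        return new_start < prev_start

    assert needs_requery(datetime(2025, 1, 2, 12, 0), datetime(2025, 1, 2, 0, 0))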
@@ -1934,7 +2025,7 @@ async def test_history_stats_handles_floored_timestamps(
     await async_update_entity(hass, "sensor.sensor1")
     await hass.async_block_till_done()

-    assert last_times == (start_time, start_time + timedelta(hours=2))
+    assert last_times == (start_time, start_time)


 async def test_unique_id(
@@ -3,12 +3,7 @@
 from collections.abc import Generator
 from unittest.mock import AsyncMock, MagicMock, patch

-from pylamarzocco.const import (
-    FirmwareType,
-    UpdateCommandStatus,
-    UpdateProgressInfo,
-    UpdateStatus,
-)
+from pylamarzocco.const import FirmwareType, UpdateProgressInfo, UpdateStatus
 from pylamarzocco.exceptions import RequestNotSuccessful
 from pylamarzocco.models import UpdateDetails
 import pytest
@@ -61,7 +56,7 @@ async def test_update_process(
     mock_lamarzocco.get_firmware.side_effect = [
         UpdateDetails(
             status=UpdateStatus.TO_UPDATE,
-            command_status=UpdateCommandStatus.IN_PROGRESS,
+            command_status=UpdateStatus.IN_PROGRESS,
             progress_info=UpdateProgressInfo.STARTING_PROCESS,
             progress_percentage=0,
         ),
@@ -139,7 +134,7 @@ async def test_update_times_out(
     """Test error during update."""
     mock_lamarzocco.get_firmware.return_value = UpdateDetails(
         status=UpdateStatus.TO_UPDATE,
-        command_status=UpdateCommandStatus.IN_PROGRESS,
+        command_status=UpdateStatus.IN_PROGRESS,
         progress_info=UpdateProgressInfo.STARTING_PROCESS,
         progress_percentage=0,
     )
|
@ -66,6 +66,34 @@ async def test_entity(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def test_schedule_update_webhook_event(
|
||||||
|
hass: HomeAssistant, config_entry: MockConfigEntry, netatmo_auth: AsyncMock
|
||||||
|
) -> None:
|
||||||
|
"""Test schedule update webhook event without schedule_id."""
|
||||||
|
|
||||||
|
with selected_platforms([Platform.CLIMATE]):
|
||||||
|
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||||
|
|
||||||
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
|
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
|
||||||
|
climate_entity_livingroom = "climate.livingroom"
|
||||||
|
|
||||||
|
# Save initial state
|
||||||
|
initial_state = hass.states.get(climate_entity_livingroom)
|
||||||
|
|
||||||
|
# Create a schedule update event without a schedule_id (the event is sent when temperature sets of a schedule are changed)
|
||||||
|
response = {
|
||||||
|
"home_id": "91763b24c43d3e344f424e8b",
|
||||||
|
"event_type": "schedule",
|
||||||
|
"push_type": "home_event_changed",
|
||||||
|
}
|
||||||
|
await simulate_webhook(hass, webhook_id, response)
|
||||||
|
|
||||||
|
# State should be unchanged
|
||||||
|
assert hass.states.get(climate_entity_livingroom) == initial_state
|
||||||
|
|
||||||
|
|
||||||
async def test_webhook_event_handling_thermostats(
|
async def test_webhook_event_handling_thermostats(
|
||||||
hass: HomeAssistant, config_entry: MockConfigEntry, netatmo_auth: AsyncMock
|
hass: HomeAssistant, config_entry: MockConfigEntry, netatmo_auth: AsyncMock
|
||||||
) -> None:
|
) -> None:
|
||||||
|
@@ -55,5 +55,12 @@
     "reservations_remaining": 2,
     "attachment_total_size": 0,
     "attachment_total_size_remaining": 104857600
+  },
+  "billing": {
+    "customer": true,
+    "subscription": true,
+    "status": "active",
+    "interval": "year",
+    "paid_until": 1754080667
   }
 }
@@ -724,6 +724,57 @@ async def test_cleanup_combined_with_NVR(
     reolink_connect.baichuan.mac_address.return_value = TEST_MAC_CAM


+async def test_cleanup_hub_and_direct_connection(
+    hass: HomeAssistant,
+    config_entry: MockConfigEntry,
+    reolink_connect: MagicMock,
+    entity_registry: er.EntityRegistry,
+    device_registry: dr.DeviceRegistry,
+) -> None:
+    """Test cleanup of the device registry if IPC camera device was connected directly and through the hub/NVR."""
+    reolink_connect.channels = [0]
+    entity_id = f"{TEST_UID}_{TEST_UID_CAM}_record_audio"
+    dev_id = f"{TEST_UID}_{TEST_UID_CAM}"
+    domain = Platform.SWITCH
+    start_identifiers = {
+        (DOMAIN, dev_id),  # IPC camera through hub
+        (DOMAIN, TEST_UID_CAM),  # directly connected IPC camera
+        ("OTHER_INTEGRATION", "SOME_ID"),
+    }
+
+    dev_entry = device_registry.async_get_or_create(
+        identifiers=start_identifiers,
+        connections={(CONNECTION_NETWORK_MAC, TEST_MAC_CAM)},
+        config_entry_id=config_entry.entry_id,
+        disabled_by=None,
+    )
+
+    entity_registry.async_get_or_create(
+        domain=domain,
+        platform=DOMAIN,
+        unique_id=entity_id,
+        config_entry=config_entry,
+        suggested_object_id=entity_id,
+        disabled_by=None,
+        device_id=dev_entry.id,
+    )
+
+    assert entity_registry.async_get_entity_id(domain, DOMAIN, entity_id)
+    device = device_registry.async_get_device(identifiers={(DOMAIN, dev_id)})
+    assert device
+    assert device.identifiers == start_identifiers
+
+    # setup CH 0 and host entities/device
+    with patch("homeassistant.components.reolink.PLATFORMS", [domain]):
+        assert await hass.config_entries.async_setup(config_entry.entry_id)
+        await hass.async_block_till_done()
+
+    assert entity_registry.async_get_entity_id(domain, DOMAIN, entity_id)
+    device = device_registry.async_get_device(identifiers={(DOMAIN, dev_id)})
+    assert device
+    assert device.identifiers == start_identifiers
+
+
 async def test_no_repair_issue(
     hass: HomeAssistant, config_entry: MockConfigEntry, issue_registry: ir.IssueRegistry
 ) -> None:
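The new test covers one physical camera registered twice, once under the hub-prefixed uid and once under its direct uid, plus an identifier owned by another integration; setup must leave that identifier set untouched. An illustrative check for such double registration, not the integration's code:

    def has_hub_and_direct(
        identifiers: set[tuple[str, str]], hub_uid: str, cam_uid: str
    ) -> bool:
        # Hub-routed devices carry "<hub uid>_<camera uid>"; direct ones
        # carry the bare camera uid.
        return (
            ("reolink", f"{hub_uid}_{cam_uid}") in identifiers
            and ("reolink", cam_uid) in identifiers
        )

    ids = {("reolink", "HUB1_CAM1"), ("reolink", "CAM1"), ("OTHER_INTEGRATION", "SOME_ID")}
    assert has_hub_and_direct(ids, "HUB1", "CAM1")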
@@ -541,7 +541,7 @@
   'hvac_modes': list([
     <HVACMode.OFF: 'off'>,
     <HVACMode.COOL: 'cool'>,
-    <HVACMode.AUTO: 'auto'>,
+    <HVACMode.HEAT_COOL: 'heat_cool'>,
   ]),
   'max_temp': 35.0,
   'min_temp': 7.0,
@@ -589,7 +589,7 @@
   'hvac_modes': list([
     <HVACMode.OFF: 'off'>,
     <HVACMode.COOL: 'cool'>,
-    <HVACMode.AUTO: 'auto'>,
+    <HVACMode.HEAT_COOL: 'heat_cool'>,
   ]),
   'max_temp': 35.0,
   'min_temp': 7.0,
@@ -625,7 +625,7 @@ async def test_thermostat_set_hvac_mode(
     await hass.services.async_call(
         CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
-        {ATTR_ENTITY_ID: "climate.asd", ATTR_HVAC_MODE: HVACMode.AUTO},
+        {ATTR_ENTITY_ID: "climate.asd", ATTR_HVAC_MODE: HVACMode.HEAT_COOL},
         blocking=True,
     )
     devices.execute_device_command.assert_called_once_with(
@@ -78,12 +78,8 @@
   'group_members': list([
   ]),
   'is_volume_muted': True,
-  'media_album_name': 'None',
-  'media_artist': 'None',
-  'media_channel': 'None',
   'media_duration': 1,
   'media_position': 1,
-  'media_title': 'None',
   'query_result': dict({
   }),
   'repeat': <RepeatMode.OFF: 'off'>,
@@ -72,7 +72,12 @@ from homeassistant.helpers.device_registry import DeviceRegistry
 from homeassistant.helpers.entity_registry import EntityRegistry
 from homeassistant.util.dt import utcnow

-from .conftest import FAKE_VALID_ITEM_ID, TEST_MAC, TEST_VOLUME_STEP
+from .conftest import (
+    FAKE_VALID_ITEM_ID,
+    TEST_MAC,
+    TEST_VOLUME_STEP,
+    configure_squeezebox_media_player_platform,
+)

 from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform

@@ -100,6 +105,33 @@ async def test_entity_registry(
     await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id)


+async def test_squeezebox_new_player_discovery(
+    hass: HomeAssistant,
+    config_entry: MockConfigEntry,
+    lms: MagicMock,
+    player_factory: MagicMock,
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test discovery of a new squeezebox player."""
+    # Initial setup with one player (from the 'lms' fixture)
+    await configure_squeezebox_media_player_platform(hass, config_entry, lms)
+    await hass.async_block_till_done(wait_background_tasks=True)
+    assert hass.states.get("media_player.test_player") is not None
+    assert hass.states.get("media_player.test_player_2") is None
+
+    # Simulate a new player appearing
+    new_player_mock = player_factory(TEST_MAC[1])
+    lms.async_get_players.return_value = [
+        lms.async_get_players.return_value[0],
+        new_player_mock,
+    ]
+
+    freezer.tick(timedelta(seconds=DISCOVERY_INTERVAL))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+    assert hass.states.get("media_player.test_player_2") is not None
+
+
 async def test_squeezebox_player_rediscovery(
     hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory
 ) -> None:
@@ -5175,7 +5175,7 @@ async def test_hard_reset_controller(
     client.async_send_command.side_effect = async_send_command_no_driver_ready

     with patch(
-        "homeassistant.components.zwave_js.api.HARD_RESET_CONTROLLER_DRIVER_READY_TIMEOUT",
+        "homeassistant.components.zwave_js.api.DRIVER_READY_TIMEOUT",
         new=0,
     ):
         await ws_client.send_json_auto_id(
@@ -5551,8 +5551,12 @@ async def test_restore_nvm(
     integration,
     client,
     hass_ws_client: WebSocketGenerator,
+    get_server_version: AsyncMock,
+    caplog: pytest.LogCaptureFixture,
 ) -> None:
     """Test the restore NVM websocket command."""
+    entry = integration
+    assert entry.unique_id == "3245146787"
     ws_client = await hass_ws_client(hass)

     # Set up mocks for the controller events
@@ -5632,6 +5636,45 @@ async def test_restore_nvm(
         },
         require_schema=14,
     )
+    assert entry.unique_id == "1234"
+
+    client.async_send_command.reset_mock()
+
+    # Test client connect error when getting the server version.
+
+    get_server_version.side_effect = ClientError("Boom!")
+
+    # Send the subscription request
+    await ws_client.send_json_auto_id(
+        {
+            "type": "zwave_js/restore_nvm",
+            "entry_id": entry.entry_id,
+            "data": "dGVzdA==",  # base64 encoded "test"
+        }
+    )
+
+    # Verify the finished event first
+    msg = await ws_client.receive_json()
+    assert msg["type"] == "event"
+    assert msg["event"]["event"] == "finished"
+
+    # Verify subscription success
+    msg = await ws_client.receive_json()
+    assert msg["type"] == "result"
+    assert msg["success"] is True
+
+    assert client.async_send_command.call_count == 3
+    assert client.async_send_command.call_args_list[0] == call(
+        {
+            "command": "controller.restore_nvm",
+            "nvmData": "dGVzdA==",
+        },
+        require_schema=14,
+    )
+    assert (
+        "Failed to get server version, cannot update config entry"
+        "unique id with new home id, after controller NVM restore"
+    ) in caplog.text
+
     client.async_send_command.reset_mock()

@@ -5647,7 +5690,7 @@ async def test_restore_nvm(
     client.async_send_command.side_effect = async_send_command_no_driver_ready

     with patch(
-        "homeassistant.components.zwave_js.api.RESTORE_NVM_DRIVER_READY_TIMEOUT",
+        "homeassistant.components.zwave_js.api.DRIVER_READY_TIMEOUT",
         new=0,
     ):
         # Send the subscription request
File diff suppressed because it is too large