Mirror of https://github.com/home-assistant/core.git

Commit 60be2af8ac: 2024.4.4 (#116045)
@@ -157,3 +157,11 @@ class AirthingsHeaterEnergySensor(
     def native_value(self) -> StateType:
         """Return the value reported by the sensor."""
         return self.coordinator.data[self._id].sensors[self.entity_description.key]  # type: ignore[no-any-return]
+
+    @property
+    def available(self) -> bool:
+        """Check if device and sensor is available in data."""
+        return (
+            super().available
+            and self.entity_description.key in self.coordinator.data[self._id].sensors
+        )
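Note: the new available override ties a sensor's availability to the presence of its key in the coordinator's most recent payload. Below is a minimal standalone sketch of that pattern, using stand-in classes rather than Home Assistant's real coordinator and entity types; the device and sensor names are illustrative.

from dataclasses import dataclass, field


@dataclass
class FakeCoordinator:
    """Stand-in for a data update coordinator holding per-device sensor dicts."""

    data: dict[str, dict[str, float]] = field(default_factory=dict)
    last_update_success: bool = True


class FakeSensorEntity:
    def __init__(self, coordinator: FakeCoordinator, device_id: str, key: str) -> None:
        self.coordinator = coordinator
        self._id = device_id
        self._key = key

    @property
    def available(self) -> bool:
        # Base availability (last refresh succeeded) plus a per-key presence check.
        return (
            self.coordinator.last_update_success
            and self._key in self.coordinator.data.get(self._id, {})
        )


coordinator = FakeCoordinator(data={"dev1": {"temperature": 21.5}})
entity = FakeSensorEntity(coordinator, "dev1", "radon")
print(entity.available)  # False: "radon" is missing from the latest payload
coordinator.data["dev1"]["radon"] = 12.0
print(entity.available)  # True once the key shows up again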
@@ -5,5 +5,5 @@
   "documentation": "https://www.home-assistant.io/integrations/doods",
   "iot_class": "local_polling",
   "loggers": ["pydoods"],
-  "requirements": ["pydoods==1.0.2", "Pillow==10.2.0"]
+  "requirements": ["pydoods==1.0.2", "Pillow==10.3.0"]
 }
@@ -6,5 +6,5 @@
   "dependencies": ["http"],
   "documentation": "https://www.home-assistant.io/integrations/generic",
   "iot_class": "local_push",
-  "requirements": ["ha-av==10.1.1", "Pillow==10.2.0"]
+  "requirements": ["ha-av==10.1.1", "Pillow==10.3.0"]
 }
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/calendar.google",
   "iot_class": "cloud_polling",
   "loggers": ["googleapiclient"],
-  "requirements": ["gcal-sync==6.0.4", "oauth2client==4.1.3", "ical==7.0.3"]
+  "requirements": ["gcal-sync==6.0.4", "oauth2client==4.1.3", "ical==8.0.0"]
 }
@@ -93,7 +93,7 @@ BUTTON_EDIT = {
 }


-validate_addr = cv.matches_regex(r"\[\d\d:\d\d:\d\d:\d\d\]")
+validate_addr = cv.matches_regex(r"\[(?:\d\d:)?\d\d:\d\d:\d\d\]")


 async def validate_add_controller(
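Note: the widened pattern accepts both four-part and three-part Homeworks addresses. A quick check with Python's re module, using illustrative sample addresses rather than values from a real controller (plain re is used here only to exercise the expression, not Home Assistant's cv.matches_regex helper):

import re

validate_addr = re.compile(r"\[(?:\d\d:)?\d\d:\d\d:\d\d\]")

for addr in ("[02:08:02:01]", "[02:08:03]", "[02:08]"):
    print(addr, bool(validate_addr.fullmatch(addr)))
# [02:08:02:01] True   - four segments (previous behaviour)
# [02:08:03] True      - three segments now accepted
# [02:08] False        - still rejected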
@@ -565,15 +565,7 @@ class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN):
                 CONF_KEYPADS: [
                     {
                         CONF_ADDR: keypad[CONF_ADDR],
-                        CONF_BUTTONS: [
-                            {
-                                CONF_LED: button[CONF_LED],
-                                CONF_NAME: button[CONF_NAME],
-                                CONF_NUMBER: button[CONF_NUMBER],
-                                CONF_RELEASE_DELAY: button[CONF_RELEASE_DELAY],
-                            }
-                            for button in keypad[CONF_BUTTONS]
-                        ],
+                        CONF_BUTTONS: [],
                         CONF_NAME: keypad[CONF_NAME],
                     }
                     for keypad in config[CONF_KEYPADS]
@@ -191,13 +191,13 @@ class HyperionVisiblePrioritySensor(HyperionSensor):
             if priority[KEY_COMPONENTID] == "COLOR":
                 state_value = priority[KEY_VALUE][KEY_RGB]
             else:
-                state_value = priority[KEY_OWNER]
+                state_value = priority.get(KEY_OWNER)

             attrs = {
                 "component_id": priority[KEY_COMPONENTID],
                 "origin": priority[KEY_ORIGIN],
                 "priority": priority[KEY_PRIORITY],
-                "owner": priority[KEY_OWNER],
+                "owner": priority.get(KEY_OWNER),
             }

             if priority[KEY_COMPONENTID] == "COLOR":
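Note: switching from priority[KEY_OWNER] to priority.get(KEY_OWNER) means a priority entry without an owner field yields None instead of raising KeyError. A plain-dict illustration with made-up keys:

priority_without_owner = {"componentid": "COLOR", "value": {"RGB": [0, 0, 0]}}

# priority_without_owner["owner"] would raise KeyError here
print(priority_without_owner.get("owner"))  # prints None, so the sensor keeps updating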
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/image_upload",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["Pillow==10.2.0"]
+  "requirements": ["Pillow==10.3.0"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/local_calendar",
   "iot_class": "local_polling",
   "loggers": ["ical"],
-  "requirements": ["ical==7.0.3"]
+  "requirements": ["ical==8.0.0"]
 }
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/local_todo",
   "iot_class": "local_polling",
-  "requirements": ["ical==7.0.3"]
+  "requirements": ["ical==8.0.0"]
 }
@@ -5,5 +5,5 @@
   "documentation": "https://www.home-assistant.io/integrations/matrix",
   "iot_class": "cloud_push",
   "loggers": ["matrix_client"],
-  "requirements": ["matrix-nio==0.24.0", "Pillow==10.2.0"]
+  "requirements": ["matrix-nio==0.24.0", "Pillow==10.3.0"]
 }
@@ -6,5 +6,5 @@
   "iot_class": "local_polling",
   "loggers": ["pymodbus"],
   "quality_scale": "platinum",
-  "requirements": ["pymodbus==3.6.7"]
+  "requirements": ["pymodbus==3.6.8"]
 }
@@ -8,7 +8,7 @@
   "iot_class": "local_push",
   "loggers": ["plexapi", "plexwebsocket"],
   "requirements": [
-    "PlexAPI==4.15.11",
+    "PlexAPI==4.15.12",
     "plexauth==0.0.6",
     "plexwebsocket==0.0.14"
   ],
@@ -3,5 +3,5 @@
   "name": "Camera Proxy",
   "codeowners": [],
   "documentation": "https://www.home-assistant.io/integrations/proxy",
-  "requirements": ["Pillow==10.2.0"]
+  "requirements": ["Pillow==10.3.0"]
 }
@@ -5,5 +5,5 @@
   "documentation": "https://www.home-assistant.io/integrations/qrcode",
   "iot_class": "calculated",
   "loggers": ["pyzbar"],
-  "requirements": ["Pillow==10.2.0", "pyzbar==0.1.7"]
+  "requirements": ["Pillow==10.3.0", "pyzbar==0.1.7"]
 }
@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["renault_api"],
   "quality_scale": "platinum",
-  "requirements": ["renault-api==0.2.1"]
+  "requirements": ["renault-api==0.2.2"]
 }
@@ -71,6 +71,6 @@ SENSOR_TYPES: tuple[RenaultSelectEntityDescription, ...] = (
         coordinator="charge_mode",
         data_key="chargeMode",
         translation_key="charge_mode",
-        options=["always", "always_charging", "schedule_mode"],
+        options=["always", "always_charging", "schedule_mode", "scheduled"],
     ),
 )
@@ -46,15 +46,17 @@ from .triggers.turn_on import async_get_turn_on_trigger
 SOURCES = {"TV": "KEY_TV", "HDMI": "KEY_HDMI"}

 SUPPORT_SAMSUNGTV = (
-    MediaPlayerEntityFeature.PAUSE
-    | MediaPlayerEntityFeature.VOLUME_STEP
-    | MediaPlayerEntityFeature.VOLUME_MUTE
-    | MediaPlayerEntityFeature.PREVIOUS_TRACK
-    | MediaPlayerEntityFeature.SELECT_SOURCE
-    | MediaPlayerEntityFeature.NEXT_TRACK
-    | MediaPlayerEntityFeature.TURN_OFF
+    MediaPlayerEntityFeature.NEXT_TRACK
+    | MediaPlayerEntityFeature.PAUSE
     | MediaPlayerEntityFeature.PLAY
     | MediaPlayerEntityFeature.PLAY_MEDIA
+    | MediaPlayerEntityFeature.PREVIOUS_TRACK
+    | MediaPlayerEntityFeature.SELECT_SOURCE
+    | MediaPlayerEntityFeature.STOP
+    | MediaPlayerEntityFeature.TURN_OFF
+    | MediaPlayerEntityFeature.VOLUME_MUTE
+    | MediaPlayerEntityFeature.VOLUME_SET
+    | MediaPlayerEntityFeature.VOLUME_STEP
 )

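Note: MediaPlayerEntityFeature is an IntFlag, so SUPPORT_SAMSUNGTV is a bitwise OR of members and serializes to a single integer; the snapshot values later in this diff move from 20413 to 24509, a difference of 4096, which is exactly one extra bit. The sketch below uses a toy IntFlag with made-up values, not Home Assistant's real enum, to show how adding one member changes the combined integer.

from enum import IntFlag


class ToyFeature(IntFlag):
    # Illustrative values only, not the real MediaPlayerEntityFeature numbers.
    PAUSE = 1
    PLAY = 2
    STOP = 4
    VOLUME_STEP = 8


SUPPORT_TOY = ToyFeature.PAUSE | ToyFeature.PLAY | ToyFeature.VOLUME_STEP
print(int(SUPPORT_TOY))                    # 11
print(int(SUPPORT_TOY | ToyFeature.STOP))  # 15: adding one member bumps the integer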
@@ -4,5 +4,5 @@
   "codeowners": ["@fabaff"],
   "documentation": "https://www.home-assistant.io/integrations/seven_segments",
   "iot_class": "local_polling",
-  "requirements": ["Pillow==10.2.0"]
+  "requirements": ["Pillow==10.3.0"]
 }
@@ -5,5 +5,5 @@
   "documentation": "https://www.home-assistant.io/integrations/sighthound",
   "iot_class": "cloud_polling",
   "loggers": ["simplehound"],
-  "requirements": ["Pillow==10.2.0", "simplehound==0.3"]
+  "requirements": ["Pillow==10.3.0", "simplehound==0.3"]
 }
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/sql",
   "iot_class": "local_polling",
-  "requirements": ["SQLAlchemy==2.0.29", "sqlparse==0.4.4"]
+  "requirements": ["SQLAlchemy==2.0.29", "sqlparse==0.5.0"]
 }
@@ -28,7 +28,6 @@ from homeassistant.const import (
     CONF_PASSWORD,
     CONF_PORT,
     CONF_USERNAME,
-    EVENT_HOMEASSISTANT_START,
 )
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import (
@@ -44,6 +43,7 @@ from homeassistant.helpers.dispatcher import (
 )
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.event import async_call_later
+from homeassistant.helpers.start import async_at_start
 from homeassistant.util.dt import utcnow

 from .browse_media import (
@@ -207,12 +207,7 @@ async def async_setup_entry(
     platform.async_register_entity_service(SERVICE_UNSYNC, None, "async_unsync")

     # Start server discovery task if not already running
-    if hass.is_running:
-        hass.async_create_task(start_server_discovery(hass))
-    else:
-        hass.bus.async_listen_once(
-            EVENT_HOMEASSISTANT_START, start_server_discovery(hass)
-        )
+    config_entry.async_on_unload(async_at_start(hass, start_server_discovery))


 class SqueezeBoxEntity(MediaPlayerEntity):
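Note: async_at_start (homeassistant.helpers.start) runs the callback immediately when Home Assistant is already running and otherwise waits for startup, and it returns an unsubscribe callable that config_entry.async_on_unload can clean up, which is what replaces the manual is_running/else branch above. Below is a standalone sketch of that run-now-or-at-start pattern with stand-in classes, not the real helper.

from collections.abc import Callable


class FakeHass:
    """Stand-in for the Home Assistant core object."""

    def __init__(self, is_running: bool) -> None:
        self.is_running = is_running
        self._start_listeners: list[Callable[[], None]] = []

    def listen_once_start(self, callback: Callable[[], None]) -> Callable[[], None]:
        self._start_listeners.append(callback)

        def _unsub() -> None:
            if callback in self._start_listeners:
                self._start_listeners.remove(callback)

        return _unsub

    def fire_start(self) -> None:
        while self._start_listeners:
            self._start_listeners.pop(0)()


def at_start(hass: FakeHass, callback: Callable[[], None]) -> Callable[[], None]:
    """Run callback now if already started, else at startup; return an unsub."""
    if hass.is_running:
        callback()
        return lambda: None
    return hass.listen_once_start(callback)


hass = FakeHass(is_running=False)
unsub = at_start(hass, lambda: print("server discovery started"))
hass.fire_start()  # prints once the fake instance "starts"
unsub()  # safe no-op afterwards; async_on_unload would call this on unload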
@@ -91,7 +91,7 @@ class FuelPriceSensor(TankerkoenigCoordinatorEntity, SensorEntity):
         self._fuel_type = fuel_type
         self._attr_translation_key = fuel_type
         self._attr_unique_id = f"{station.id}_{fuel_type}"
-        attrs = {
+        attrs: dict[str, int | str | float | None] = {
             ATTR_BRAND: station.brand,
             ATTR_FUEL_TYPE: fuel_type,
             ATTR_STATION_NAME: station.name,
@@ -102,8 +102,8 @@ class FuelPriceSensor(TankerkoenigCoordinatorEntity, SensorEntity):
         }

         if coordinator.show_on_map:
-            attrs[ATTR_LATITUDE] = str(station.lat)
-            attrs[ATTR_LONGITUDE] = str(station.lng)
+            attrs[ATTR_LATITUDE] = station.lat
+            attrs[ATTR_LONGITUDE] = station.lng
         self._attr_extra_state_attributes = attrs

     @property
@@ -10,6 +10,6 @@
     "tf-models-official==2.5.0",
     "pycocotools==2.0.6",
     "numpy==1.26.0",
-    "Pillow==10.2.0"
+    "Pillow==10.3.0"
   ]
 }
@@ -58,7 +58,7 @@ SHIFT_STATES = {"P": "p", "D": "d", "R": "r", "N": "n"}
 class TeslemetrySensorEntityDescription(SensorEntityDescription):
     """Describes Teslemetry Sensor entity."""

-    value_fn: Callable[[StateType], StateType | datetime] = lambda x: x
+    value_fn: Callable[[StateType], StateType] = lambda x: x


 VEHICLE_DESCRIPTIONS: tuple[TeslemetrySensorEntityDescription, ...] = (
@@ -447,8 +447,14 @@ class TeslemetryVehicleSensorEntity(TeslemetryVehicleEntity, SensorEntity):
         description: TeslemetrySensorEntityDescription,
     ) -> None:
         """Initialize the sensor."""
+        self.entity_description = description
         super().__init__(vehicle, description.key)
+
+    @property
+    def native_value(self) -> StateType:
+        """Return the state of the sensor."""
+        return self.entity_description.value_fn(self._value)


 class TeslemetryVehicleTimeSensorEntity(TeslemetryVehicleEntity, SensorEntity):
     """Base class for Teslemetry vehicle metric sensors."""
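Note: each Teslemetry sensor description carries a value_fn, and the new native_value property simply pipes the raw vehicle value through it. Below is a standalone sketch of the description-plus-value_fn pattern with an illustrative dataclass and sample data, not the integration's real classes.

from collections.abc import Callable
from dataclasses import dataclass


@dataclass(frozen=True)
class SketchSensorDescription:
    key: str
    value_fn: Callable[[object], object] = lambda x: x


DESCRIPTIONS = (
    SketchSensorDescription(key="vehicle_speed"),
    SketchSensorDescription(key="shift_state", value_fn=lambda x: str(x).lower()),
)

raw = {"vehicle_speed": 88.0, "shift_state": "D"}
for description in DESCRIPTIONS:
    # Mirrors: return self.entity_description.value_fn(self._value)
    print(description.key, description.value_fn(raw[description.key]))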
@@ -8,7 +8,7 @@
   "iot_class": "local_push",
   "loggers": ["aiounifi"],
   "quality_scale": "platinum",
-  "requirements": ["aiounifi==74"],
+  "requirements": ["aiounifi==75"],
   "ssdp": [
     {
       "manufacturer": "Ubiquiti Networks",
@@ -12,6 +12,7 @@ from dataclasses import dataclass, field
 from datetime import timedelta
 from typing import TYPE_CHECKING, Any, cast

+from aiohttp import ClientError
 from aiohttp.hdrs import METH_POST
 from aiohttp.web import Request, Response
 from aiowithings import NotificationCategory, WithingsClient
@@ -340,7 +341,11 @@ class WithingsWebhookManager:

 async def async_unsubscribe_webhooks(client: WithingsClient) -> None:
     """Unsubscribe to all Withings webhooks."""
-    current_webhooks = await client.list_notification_configurations()
+    try:
+        current_webhooks = await client.list_notification_configurations()
+    except ClientError:
+        LOGGER.exception("Error when unsubscribing webhooks")
+        return

     for webhook_configuration in current_webhooks:
         LOGGER.debug(
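Note: wrapping list_notification_configurations() in try/except ClientError turns a transient network failure during unload into a logged message instead of an unhandled exception. A minimal sketch of that guard with a stubbed client in place of the real aiowithings API:

import asyncio
import logging

from aiohttp import ClientError

LOGGER = logging.getLogger(__name__)


class StubClient:
    """Stand-in client whose listing call always fails with a network error."""

    async def list_notification_configurations(self) -> list[str]:
        raise ClientError("network is down")


async def async_unsubscribe_webhooks(client: StubClient) -> None:
    try:
        current_webhooks = await client.list_notification_configurations()
    except ClientError:
        LOGGER.exception("Error when unsubscribing webhooks")
        return
    for webhook in current_webhooks:
        print("would revoke", webhook)


asyncio.run(async_unsubscribe_webhooks(StubClient()))  # logs and returns cleanly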
@@ -5,5 +5,5 @@
   "documentation": "https://www.home-assistant.io/integrations/xmpp",
   "iot_class": "cloud_push",
   "loggers": ["pyasn1", "slixmpp"],
-  "requirements": ["slixmpp==1.8.4", "emoji==2.8.0"]
+  "requirements": ["slixmpp==1.8.5", "emoji==2.8.0"]
 }
@@ -8,5 +8,5 @@
   "iot_class": "local_push",
   "loggers": ["zeroconf"],
   "quality_scale": "internal",
-  "requirements": ["zeroconf==0.132.0"]
+  "requirements": ["zeroconf==0.132.2"]
 }
@@ -18,7 +18,7 @@ from .util.signal_type import SignalType
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2024
 MINOR_VERSION: Final = 4
-PATCH_VERSION: Final = "3"
+PATCH_VERSION: Final = "4"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)
@@ -656,6 +656,12 @@ class _ScriptRun:
         # check if condition already okay
         if condition.async_template(self._hass, wait_template, self._variables, False):
             self._variables["wait"]["completed"] = True
+            self._changed()
+            return
+
+        if timeout == 0:
+            self._changed()
+            self._async_handle_timeout()
             return

         futures, timeout_handle, timeout_future = self._async_futures_with_timeout(
@@ -1085,6 +1091,11 @@ class _ScriptRun:
         self._variables["wait"] = {"remaining": timeout, "trigger": None}
         trace_set_result(wait=self._variables["wait"])

+        if timeout == 0:
+            self._changed()
+            self._async_handle_timeout()
+            return
+
         futures, timeout_handle, timeout_future = self._async_futures_with_timeout(
             timeout
         )
@@ -1115,6 +1126,14 @@ class _ScriptRun:
             futures, timeout_handle, timeout_future, remove_triggers
         )

+    def _async_handle_timeout(self) -> None:
+        """Handle timeout."""
+        self._variables["wait"]["remaining"] = 0.0
+        if not self._action.get(CONF_CONTINUE_ON_TIMEOUT, True):
+            self._log(_TIMEOUT_MSG)
+            trace_set_result(wait=self._variables["wait"], timeout=True)
+            raise _AbortScript from TimeoutError()
+
     async def _async_wait_with_optional_timeout(
         self,
         futures: list[asyncio.Future[None]],
@@ -1125,11 +1144,7 @@ class _ScriptRun:
         try:
             await asyncio.wait(futures, return_when=asyncio.FIRST_COMPLETED)
             if timeout_future and timeout_future.done():
-                self._variables["wait"]["remaining"] = 0.0
-                if not self._action.get(CONF_CONTINUE_ON_TIMEOUT, True):
-                    self._log(_TIMEOUT_MSG)
-                    trace_set_result(wait=self._variables["wait"], timeout=True)
-                    raise _AbortScript from TimeoutError()
+                self._async_handle_timeout()
         finally:
             if timeout_future and not timeout_future.done() and timeout_handle:
                 timeout_handle.cancel()
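Note: with timeout: 0 the wait step no longer arms any futures; it reports the state change and handles the timeout synchronously, so continue_on_timeout still decides whether the run aborts. Below is a standalone sketch of that short-circuit with a toy class, not Home Assistant's _ScriptRun.

class AbortScript(Exception):
    """Raised to stop the sketch 'script'."""


class SketchWaitStep:
    def __init__(self, timeout: float, continue_on_timeout: bool) -> None:
        self._timeout = timeout
        self._continue_on_timeout = continue_on_timeout
        self.variables = {"wait": {"remaining": timeout, "trigger": None}}

    def _handle_timeout(self) -> None:
        """Mirror _async_handle_timeout: zero out 'remaining', maybe abort."""
        self.variables["wait"]["remaining"] = 0.0
        if not self._continue_on_timeout:
            raise AbortScript("wait_for_trigger timed out")

    def run(self) -> str:
        if self._timeout == 0:
            # No futures are scheduled at all: handle the timeout right away.
            self._handle_timeout()
            return "continued past the wait"
        return "would wait for triggers/futures here"


print(SketchWaitStep(0, continue_on_timeout=True).run())
try:
    SketchWaitStep(0, continue_on_timeout=False).run()
except AbortScript as err:
    print("aborted:", err)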
@@ -403,6 +403,8 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]):
             if not auth_failed and self._listeners and not self.hass.is_stopping:
                 self._schedule_refresh()

+        self._async_refresh_finished()
+
         if not self.last_update_success and not previous_update_success:
             return

@@ -413,6 +415,15 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]):
         ):
             self.async_update_listeners()

+    @callback
+    def _async_refresh_finished(self) -> None:
+        """Handle when a refresh has finished.
+
+        Called when refresh is finished before listeners are updated.
+
+        To be overridden by subclasses.
+        """
+
     @callback
     def async_set_update_error(self, err: Exception) -> None:
         """Manually set an error, log the message and notify listeners."""
@@ -446,20 +457,9 @@ class TimestampDataUpdateCoordinator(DataUpdateCoordinator[_DataT]):

     last_update_success_time: datetime | None = None

-    async def _async_refresh(
-        self,
-        log_failures: bool = True,
-        raise_on_auth_failed: bool = False,
-        scheduled: bool = False,
-        raise_on_entry_error: bool = False,
-    ) -> None:
-        """Refresh data."""
-        await super()._async_refresh(
-            log_failures,
-            raise_on_auth_failed,
-            scheduled,
-            raise_on_entry_error,
-        )
+    @callback
+    def _async_refresh_finished(self) -> None:
+        """Handle when a refresh has finished."""
         if self.last_update_success:
             self.last_update_success_time = utcnow()

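Note: DataUpdateCoordinator now exposes an _async_refresh_finished hook that runs after a refresh completes but before listeners are notified, and TimestampDataUpdateCoordinator overrides it to stamp last_update_success_time instead of overriding _async_refresh. Below is a standalone sketch of the hook-before-listeners pattern with toy classes, not Home Assistant's real coordinators.

from __future__ import annotations

import asyncio
from collections.abc import Callable
from datetime import datetime, timezone


class SketchCoordinator:
    def __init__(self) -> None:
        self.last_update_success = False
        self._listeners: list[Callable[[], None]] = []

    def async_add_listener(self, listener: Callable[[], None]) -> None:
        self._listeners.append(listener)

    async def async_refresh(self) -> None:
        self.last_update_success = True  # pretend the update worked
        self._refresh_finished()         # hook runs BEFORE listeners
        for listener in self._listeners:
            listener()

    def _refresh_finished(self) -> None:
        """To be overridden by subclasses."""


class TimestampSketchCoordinator(SketchCoordinator):
    last_update_success_time: datetime | None = None

    def _refresh_finished(self) -> None:
        if self.last_update_success:
            self.last_update_success_time = datetime.now(timezone.utc)


async def main() -> None:
    crd = TimestampSketchCoordinator()
    crd.async_add_listener(lambda: print("listener sees", crd.last_update_success_time))
    await crd.async_refresh()  # timestamp is already set when the listener fires


asyncio.run(main())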
@@ -4,7 +4,7 @@ aiodhcpwatcher==1.0.0
 aiodiscover==2.0.0
 aiohttp-fast-url-dispatcher==0.3.0
 aiohttp-zlib-ng==0.3.1
-aiohttp==3.9.4
+aiohttp==3.9.5
 aiohttp_cors==0.7.0
 astral==2.2
 async-interrupt==1.1.1
@@ -40,7 +40,7 @@ mutagen==1.47.0
 orjson==3.9.15
 packaging>=23.1
 paho-mqtt==1.6.1
-Pillow==10.2.0
+Pillow==10.3.0
 pip>=21.3.1
 psutil-home-assistant==0.0.1
 PyJWT==2.8.0
@@ -60,7 +60,7 @@ voluptuous-serialize==2.6.0
 voluptuous==0.13.1
 webrtc-noise-gain==1.2.3
 yarl==1.9.4
-zeroconf==0.132.0
+zeroconf==0.132.2

 # Constrain pycryptodome to avoid vulnerability
 # see https://github.com/home-assistant/core/pull/16238
@@ -107,7 +107,7 @@ regex==2021.8.28
 # requirements so we can directly link HA versions to these library versions.
 anyio==4.3.0
 h11==0.14.0
-httpcore==1.0.4
+httpcore==1.0.5

 # Ensure we have a hyperframe version that works in Python 3.10
 # 5.2.0 fixed a collections abc deprecation
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "homeassistant"
-version = "2024.4.3"
+version = "2024.4.4"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
 readme = "README.rst"
@@ -23,7 +23,7 @@ classifiers = [
 ]
 requires-python = ">=3.12.0"
 dependencies = [
-    "aiohttp==3.9.4",
+    "aiohttp==3.9.5",
     "aiohttp_cors==0.7.0",
     "aiohttp-fast-url-dispatcher==0.3.0",
     "aiohttp-zlib-ng==0.3.1",
@@ -49,7 +49,7 @@ dependencies = [
     "PyJWT==2.8.0",
     # PyJWT has loose dependency. We want the latest one.
     "cryptography==42.0.5",
-    "Pillow==10.2.0",
+    "Pillow==10.3.0",
     "pyOpenSSL==24.1.0",
     "orjson==3.9.15",
     "packaging>=23.1",
@@ -3,7 +3,7 @@
 -c homeassistant/package_constraints.txt

 # Home Assistant Core
-aiohttp==3.9.4
+aiohttp==3.9.5
 aiohttp_cors==0.7.0
 aiohttp-fast-url-dispatcher==0.3.0
 aiohttp-zlib-ng==0.3.1
@@ -24,7 +24,7 @@ Jinja2==3.1.3
 lru-dict==1.3.0
 PyJWT==2.8.0
 cryptography==42.0.5
-Pillow==10.2.0
+Pillow==10.3.0
 pyOpenSSL==24.1.0
 orjson==3.9.15
 packaging>=23.1
@@ -42,10 +42,10 @@ Mastodon.py==1.8.1
 # homeassistant.components.seven_segments
 # homeassistant.components.sighthound
 # homeassistant.components.tensorflow
-Pillow==10.2.0
+Pillow==10.3.0

 # homeassistant.components.plex
-PlexAPI==4.15.11
+PlexAPI==4.15.12

 # homeassistant.components.progettihwsw
 ProgettiHWSW==0.1.3
@@ -392,7 +392,7 @@ aiotankerkoenig==0.4.1
 aiotractive==0.5.6

 # homeassistant.components.unifi
-aiounifi==74
+aiounifi==75

 # homeassistant.components.vlc_telnet
 aiovlc==0.1.0
@@ -1118,7 +1118,7 @@ ibmiotf==0.3.4
 # homeassistant.components.google
 # homeassistant.components.local_calendar
 # homeassistant.components.local_todo
-ical==7.0.3
+ical==8.0.0

 # homeassistant.components.ping
 icmplib==3.0
@@ -1973,7 +1973,7 @@ pymitv==1.4.3
 pymochad==0.2.0

 # homeassistant.components.modbus
-pymodbus==3.6.7
+pymodbus==3.6.8

 # homeassistant.components.monoprice
 pymonoprice==0.4
@@ -2429,7 +2429,7 @@ refoss-ha==1.2.0
 regenmaschine==2024.03.0

 # homeassistant.components.renault
-renault-api==0.2.1
+renault-api==0.2.2

 # homeassistant.components.renson
 renson-endura-delta==1.7.1
@@ -2553,7 +2553,7 @@ sisyphus-control==3.1.3
 slackclient==2.5.0

 # homeassistant.components.xmpp
-slixmpp==1.8.4
+slixmpp==1.8.5

 # homeassistant.components.smart_meter_texas
 smart-meter-texas==0.4.7
@@ -2595,7 +2595,7 @@ spiderpy==1.6.1
 spotipy==2.23.0

 # homeassistant.components.sql
-sqlparse==0.4.4
+sqlparse==0.5.0

 # homeassistant.components.srp_energy
 srpenergy==1.3.6
@@ -2928,7 +2928,7 @@ zamg==0.3.6
 zengge==0.2

 # homeassistant.components.zeroconf
-zeroconf==0.132.0
+zeroconf==0.132.2

 # homeassistant.components.zeversolar
 zeversolar==0.3.1
@@ -36,10 +36,10 @@ HATasmota==0.8.0
 # homeassistant.components.seven_segments
 # homeassistant.components.sighthound
 # homeassistant.components.tensorflow
-Pillow==10.2.0
+Pillow==10.3.0

 # homeassistant.components.plex
-PlexAPI==4.15.11
+PlexAPI==4.15.12

 # homeassistant.components.progettihwsw
 ProgettiHWSW==0.1.3
@@ -365,7 +365,7 @@ aiotankerkoenig==0.4.1
 aiotractive==0.5.6

 # homeassistant.components.unifi
-aiounifi==74
+aiounifi==75

 # homeassistant.components.vlc_telnet
 aiovlc==0.1.0
@@ -908,7 +908,7 @@ ibeacon-ble==1.2.0
 # homeassistant.components.google
 # homeassistant.components.local_calendar
 # homeassistant.components.local_todo
-ical==7.0.3
+ical==8.0.0

 # homeassistant.components.ping
 icmplib==3.0
@@ -1533,7 +1533,7 @@ pymeteoclimatic==0.1.0
 pymochad==0.2.0

 # homeassistant.components.modbus
-pymodbus==3.6.7
+pymodbus==3.6.8

 # homeassistant.components.monoprice
 pymonoprice==0.4
@@ -1875,7 +1875,7 @@ refoss-ha==1.2.0
 regenmaschine==2024.03.0

 # homeassistant.components.renault
-renault-api==0.2.1
+renault-api==0.2.2

 # homeassistant.components.renson
 renson-endura-delta==1.7.1
@@ -1999,7 +1999,7 @@ spiderpy==1.6.1
 spotipy==2.23.0

 # homeassistant.components.sql
-sqlparse==0.4.4
+sqlparse==0.5.0

 # homeassistant.components.srp_energy
 srpenergy==1.3.6
@@ -2263,7 +2263,7 @@ yt-dlp==2024.04.09
 zamg==0.3.6

 # homeassistant.components.zeroconf
-zeroconf==0.132.0
+zeroconf==0.132.2

 # homeassistant.components.zeversolar
 zeversolar==0.3.1
@@ -100,7 +100,7 @@ regex==2021.8.28
 # requirements so we can directly link HA versions to these library versions.
 anyio==4.3.0
 h11==0.14.0
-httpcore==1.0.4
+httpcore==1.0.5

 # Ensure we have a hyperframe version that works in Python 3.10
 # 5.2.0 fixed a collections abc deprecation
@@ -9,7 +9,6 @@ from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
 from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN
 from homeassistant.components.homeworks.const import (
     CONF_ADDR,
-    CONF_BUTTONS,
     CONF_DIMMERS,
     CONF_INDEX,
     CONF_KEYPADS,
@@ -161,26 +160,6 @@ async def test_import_flow(
                 {
                     CONF_ADDR: "[02:08:02:01]",
                     CONF_NAME: "Foyer Keypad",
-                    CONF_BUTTONS: [
-                        {
-                            CONF_NAME: "Morning",
-                            CONF_NUMBER: 1,
-                            CONF_LED: True,
-                            CONF_RELEASE_DELAY: None,
-                        },
-                        {
-                            CONF_NAME: "Relax",
-                            CONF_NUMBER: 2,
-                            CONF_LED: True,
-                            CONF_RELEASE_DELAY: None,
-                        },
-                        {
-                            CONF_NAME: "Dim up",
-                            CONF_NUMBER: 3,
-                            CONF_LED: False,
-                            CONF_RELEASE_DELAY: 0.2,
-                        },
-                    ],
                 }
             ],
         },
@@ -207,16 +186,7 @@
         "keypads": [
             {
                 "addr": "[02:08:02:01]",
-                "buttons": [
-                    {
-                        "led": True,
-                        "name": "Morning",
-                        "number": 1,
-                        "release_delay": None,
-                    },
-                    {"led": True, "name": "Relax", "number": 2, "release_delay": None},
-                    {"led": False, "name": "Dim up", "number": 3, "release_delay": 0.2},
-                ],
+                "buttons": [],
                 "name": "Foyer Keypad",
             }
         ],
@@ -574,8 +544,12 @@ async def test_options_add_remove_light_flow(
     )


+@pytest.mark.parametrize("keypad_address", ["[02:08:03:01]", "[02:08:03]"])
 async def test_options_add_remove_keypad_flow(
-    hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_homeworks: MagicMock
+    hass: HomeAssistant,
+    mock_config_entry: MockConfigEntry,
+    mock_homeworks: MagicMock,
+    keypad_address: str,
 ) -> None:
     """Test options flow to add and remove a keypad."""
     mock_config_entry.add_to_hass(hass)
@@ -596,7 +570,7 @@ async def test_options_add_remove_keypad_flow(
     result = await hass.config_entries.options.async_configure(
         result["flow_id"],
         user_input={
-            CONF_ADDR: "[02:08:03:01]",
+            CONF_ADDR: keypad_address,
             CONF_NAME: "Hall Keypad",
         },
     )
@@ -622,7 +596,7 @@ async def test_options_add_remove_keypad_flow(
                 ],
                 "name": "Foyer Keypad",
             },
-            {"addr": "[02:08:03:01]", "buttons": [], "name": "Hall Keypad"},
+            {"addr": keypad_address, "buttons": [], "name": "Hall Keypad"},
         ],
         "port": 1234,
     }
@@ -642,7 +616,7 @@ async def test_options_add_remove_keypad_flow(
     assert result["step_id"] == "remove_keypad"
     assert result["data_schema"].schema["index"].options == {
         "0": "Foyer Keypad ([02:08:02:01])",
-        "1": "Hall Keypad ([02:08:03:01])",
+        "1": f"Hall Keypad ({keypad_address})",
     }

     result = await hass.config_entries.options.async_configure(
@@ -655,7 +629,7 @@ async def test_options_add_remove_keypad_flow(
             {"addr": "[02:08:01:01]", "name": "Foyer Sconces", "rate": 1.0},
         ],
         "host": "192.168.0.1",
-        "keypads": [{"addr": "[02:08:03:01]", "buttons": [], "name": "Hall Keypad"}],
+        "keypads": [{"addr": keypad_address, "buttons": [], "name": "Hall Keypad"}],
         "port": 1234,
     }
     await hass.async_block_till_done()
@@ -159,7 +159,6 @@ async def test_visible_effect_state_changes(hass: HomeAssistant) -> None:
             KEY_ACTIVE: True,
             KEY_COMPONENTID: "COLOR",
             KEY_ORIGIN: "System",
-            KEY_OWNER: "System",
             KEY_PRIORITY: 250,
             KEY_VALUE: {KEY_RGB: [0, 0, 0]},
             KEY_VISIBLE: True,
@@ -127,7 +127,12 @@ MOCK_VEHICLES = {
                 {
                     ATTR_ENTITY_ID: "select.reg_number_charge_mode",
                     ATTR_ICON: "mdi:calendar-remove",
-                    ATTR_OPTIONS: ["always", "always_charging", "schedule_mode"],
+                    ATTR_OPTIONS: [
+                        "always",
+                        "always_charging",
+                        "schedule_mode",
+                        "scheduled",
+                    ],
                     ATTR_STATE: "always",
                     ATTR_UNIQUE_ID: "vf1aaaaa555777999_charge_mode",
                 },
@@ -363,7 +368,12 @@ MOCK_VEHICLES = {
                 {
                     ATTR_ENTITY_ID: "select.reg_number_charge_mode",
                     ATTR_ICON: "mdi:calendar-clock",
-                    ATTR_OPTIONS: ["always", "always_charging", "schedule_mode"],
+                    ATTR_OPTIONS: [
+                        "always",
+                        "always_charging",
+                        "schedule_mode",
+                        "scheduled",
+                    ],
                     ATTR_STATE: "schedule_mode",
                     ATTR_UNIQUE_ID: "vf1aaaaa555777999_charge_mode",
                 },
@@ -599,7 +609,12 @@ MOCK_VEHICLES = {
                 {
                     ATTR_ENTITY_ID: "select.reg_number_charge_mode",
                     ATTR_ICON: "mdi:calendar-remove",
-                    ATTR_OPTIONS: ["always", "always_charging", "schedule_mode"],
+                    ATTR_OPTIONS: [
+                        "always",
+                        "always_charging",
+                        "schedule_mode",
+                        "scheduled",
+                    ],
                     ATTR_STATE: "always",
                     ATTR_UNIQUE_ID: "vf1aaaaa555777123_charge_mode",
                 },
@@ -82,6 +82,7 @@
         'always',
         'always_charging',
         'schedule_mode',
+        'scheduled',
       ]),
     }),
     'config_entry_id': <ANY>,
@@ -121,6 +122,7 @@
         'always',
         'always_charging',
         'schedule_mode',
+        'scheduled',
       ]),
     }),
     'context': <ANY>,
@@ -175,6 +177,7 @@
         'always',
         'always_charging',
         'schedule_mode',
+        'scheduled',
      ]),
     }),
     'config_entry_id': <ANY>,
@@ -214,6 +217,7 @@
         'always',
         'always_charging',
         'schedule_mode',
+        'scheduled',
       ]),
     }),
     'context': <ANY>,
@@ -268,6 +272,7 @@
         'always',
         'always_charging',
         'schedule_mode',
+        'scheduled',
       ]),
     }),
     'config_entry_id': <ANY>,
@@ -307,6 +312,7 @@
         'always',
         'always_charging',
         'schedule_mode',
+        'scheduled',
       ]),
     }),
     'context': <ANY>,
@@ -401,6 +407,7 @@
         'always',
         'always_charging',
         'schedule_mode',
+        'scheduled',
       ]),
     }),
     'config_entry_id': <ANY>,
@@ -440,6 +447,7 @@
         'always',
         'always_charging',
         'schedule_mode',
+        'scheduled',
       ]),
     }),
     'context': <ANY>,
@@ -494,6 +502,7 @@
         'always',
         'always_charging',
         'schedule_mode',
+        'scheduled',
       ]),
     }),
     'config_entry_id': <ANY>,
@@ -533,6 +542,7 @@
         'always',
         'always_charging',
         'schedule_mode',
+        'scheduled',
       ]),
     }),
     'context': <ANY>,
@@ -587,6 +597,7 @@
         'always',
         'always_charging',
         'schedule_mode',
+        'scheduled',
       ]),
     }),
     'config_entry_id': <ANY>,
@@ -626,6 +637,7 @@
         'always',
         'always_charging',
         'schedule_mode',
+        'scheduled',
       ]),
     }),
     'context': <ANY>,
@@ -9,7 +9,7 @@
       'TV',
       'HDMI',
     ]),
-    'supported_features': <MediaPlayerEntityFeature: 20413>,
+    'supported_features': <MediaPlayerEntityFeature: 24509>,
   }),
   'context': <ANY>,
   'entity_id': 'media_player.any',
@@ -51,7 +51,7 @@
   'original_name': None,
   'platform': 'samsungtv',
   'previous_unique_id': None,
-  'supported_features': <MediaPlayerEntityFeature: 20413>,
+  'supported_features': <MediaPlayerEntityFeature: 24509>,
   'translation_key': None,
   'unique_id': 'sample-entry-id',
   'unit_of_measurement': None,

(One file's diff was suppressed because it is too large.)
@@ -5,6 +5,7 @@ from typing import Any
 from unittest.mock import AsyncMock, MagicMock, patch
 from urllib.parse import urlparse

+from aiohttp import ClientConnectionError
 from aiohttp.hdrs import METH_HEAD
 from aiowithings import (
     NotificationCategory,
@@ -508,6 +509,110 @@ async def test_cloud_disconnect(
         assert withings.subscribe_notification.call_count == 12


+async def test_internet_disconnect(
+    hass: HomeAssistant,
+    withings: AsyncMock,
+    webhook_config_entry: MockConfigEntry,
+    hass_client_no_auth: ClientSessionGenerator,
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test we can recover from internet disconnects."""
+    await mock_cloud(hass)
+    await hass.async_block_till_done()
+
+    with (
+        patch("homeassistant.components.cloud.async_is_logged_in", return_value=True),
+        patch.object(cloud, "async_is_connected", return_value=True),
+        patch.object(cloud, "async_active_subscription", return_value=True),
+        patch(
+            "homeassistant.components.cloud.async_create_cloudhook",
+            return_value="https://hooks.nabu.casa/ABCD",
+        ),
+        patch(
+            "homeassistant.components.withings.async_get_config_entry_implementation",
+        ),
+        patch(
+            "homeassistant.components.cloud.async_delete_cloudhook",
+        ),
+        patch(
+            "homeassistant.components.withings.webhook_generate_url",
+        ),
+    ):
+        await setup_integration(hass, webhook_config_entry)
+        await prepare_webhook_setup(hass, freezer)
+
+        assert cloud.async_active_subscription(hass) is True
+        assert cloud.async_is_connected(hass) is True
+        assert withings.revoke_notification_configurations.call_count == 3
+        assert withings.subscribe_notification.call_count == 6
+
+        await hass.async_block_till_done()
+
+        withings.list_notification_configurations.side_effect = ClientConnectionError
+
+        async_mock_cloud_connection_status(hass, False)
+        await hass.async_block_till_done()
+
+        assert withings.revoke_notification_configurations.call_count == 3
+
+        async_mock_cloud_connection_status(hass, True)
+        await hass.async_block_till_done()
+
+        assert withings.subscribe_notification.call_count == 12
+
+
+async def test_cloud_disconnect_retry(
+    hass: HomeAssistant,
+    withings: AsyncMock,
+    webhook_config_entry: MockConfigEntry,
+    hass_client_no_auth: ClientSessionGenerator,
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test we retry to create webhook connection again after cloud disconnects."""
+    await mock_cloud(hass)
+    await hass.async_block_till_done()
+
+    with (
+        patch("homeassistant.components.cloud.async_is_logged_in", return_value=True),
+        patch.object(cloud, "async_is_connected", return_value=True),
+        patch.object(
+            cloud, "async_active_subscription", return_value=True
+        ) as mock_async_active_subscription,
+        patch(
+            "homeassistant.components.cloud.async_create_cloudhook",
+            return_value="https://hooks.nabu.casa/ABCD",
+        ),
+        patch(
+            "homeassistant.components.withings.async_get_config_entry_implementation",
+        ),
+        patch(
+            "homeassistant.components.cloud.async_delete_cloudhook",
+        ),
+        patch(
+            "homeassistant.components.withings.webhook_generate_url",
+        ),
+    ):
+        await setup_integration(hass, webhook_config_entry)
+        await prepare_webhook_setup(hass, freezer)
+
+        assert cloud.async_active_subscription(hass) is True
+        assert cloud.async_is_connected(hass) is True
+        assert mock_async_active_subscription.call_count == 3
+
+        await hass.async_block_till_done()
+
+        async_mock_cloud_connection_status(hass, False)
+        await hass.async_block_till_done()
+
+        assert mock_async_active_subscription.call_count == 3
+
+        freezer.tick(timedelta(seconds=30))
+        async_fire_time_changed(hass)
+        await hass.async_block_till_done()
+
+        assert mock_async_active_subscription.call_count == 4
+
+
 @pytest.mark.parametrize(
     ("body", "expected_code"),
     [
@@ -1311,6 +1311,184 @@ async def test_wait_timeout(
     assert_action_trace(expected_trace)


+@pytest.mark.parametrize(
+    "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}]
+)
+async def test_wait_trigger_with_zero_timeout(
+    hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str
+) -> None:
+    """Test the wait trigger with zero timeout option."""
+    event = "test_event"
+    events = async_capture_events(hass, event)
+    action = {
+        "wait_for_trigger": {
+            "platform": "state",
+            "entity_id": "switch.test",
+            "to": "off",
+        }
+    }
+    action["timeout"] = timeout_param
+    action["continue_on_timeout"] = True
+    sequence = cv.SCRIPT_SCHEMA([action, {"event": event}])
+    sequence = await script.async_validate_actions_config(hass, sequence)
+    script_obj = script.Script(hass, sequence, "Test Name", "test_domain")
+    wait_started_flag = async_watch_for_action(script_obj, "wait")
+    hass.states.async_set("switch.test", "on")
+    hass.async_create_task(script_obj.async_run(context=Context()))
+
+    try:
+        await asyncio.wait_for(wait_started_flag.wait(), 1)
+    except (AssertionError, TimeoutError):
+        await script_obj.async_stop()
+        raise
+
+    assert not script_obj.is_running
+    assert len(events) == 1
+    assert "(timeout: 0:00:00)" in caplog.text
+
+    variable_wait = {"wait": {"trigger": None, "remaining": 0.0}}
+    expected_trace = {
+        "0": [
+            {
+                "result": variable_wait,
+                "variables": variable_wait,
+            }
+        ],
+        "1": [{"result": {"event": "test_event", "event_data": {}}}],
+    }
+    assert_action_trace(expected_trace)
+
+
+@pytest.mark.parametrize(
+    "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}]
+)
+async def test_wait_trigger_matches_with_zero_timeout(
+    hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str
+) -> None:
+    """Test the wait trigger that matches with zero timeout option."""
+    event = "test_event"
+    events = async_capture_events(hass, event)
+    action = {
+        "wait_for_trigger": {
+            "platform": "state",
+            "entity_id": "switch.test",
+            "to": "off",
+        }
+    }
+    action["timeout"] = timeout_param
+    action["continue_on_timeout"] = True
+    sequence = cv.SCRIPT_SCHEMA([action, {"event": event}])
+    sequence = await script.async_validate_actions_config(hass, sequence)
+    script_obj = script.Script(hass, sequence, "Test Name", "test_domain")
+    wait_started_flag = async_watch_for_action(script_obj, "wait")
+    hass.states.async_set("switch.test", "off")
+    hass.async_create_task(script_obj.async_run(context=Context()))
+
+    try:
+        await asyncio.wait_for(wait_started_flag.wait(), 1)
+    except (AssertionError, TimeoutError):
+        await script_obj.async_stop()
+        raise
+
+    assert not script_obj.is_running
+    assert len(events) == 1
+    assert "(timeout: 0:00:00)" in caplog.text
+
+    variable_wait = {"wait": {"trigger": None, "remaining": 0.0}}
+    expected_trace = {
+        "0": [
+            {
+                "result": variable_wait,
+                "variables": variable_wait,
+            }
+        ],
+        "1": [{"result": {"event": "test_event", "event_data": {}}}],
+    }
+    assert_action_trace(expected_trace)
+
+
+@pytest.mark.parametrize(
+    "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}]
+)
+async def test_wait_template_with_zero_timeout(
+    hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str
+) -> None:
+    """Test the wait template with zero timeout option."""
+    event = "test_event"
+    events = async_capture_events(hass, event)
+    action = {"wait_template": "{{ states.switch.test.state == 'off' }}"}
+    action["timeout"] = timeout_param
+    action["continue_on_timeout"] = True
+    sequence = cv.SCRIPT_SCHEMA([action, {"event": event}])
+    sequence = await script.async_validate_actions_config(hass, sequence)
+    script_obj = script.Script(hass, sequence, "Test Name", "test_domain")
+    wait_started_flag = async_watch_for_action(script_obj, "wait")
+    hass.states.async_set("switch.test", "on")
+    hass.async_create_task(script_obj.async_run(context=Context()))
+
+    try:
+        await asyncio.wait_for(wait_started_flag.wait(), 1)
+    except (AssertionError, TimeoutError):
+        await script_obj.async_stop()
+        raise
+
+    assert not script_obj.is_running
+    assert len(events) == 1
+    assert "(timeout: 0:00:00)" in caplog.text
+    variable_wait = {"wait": {"completed": False, "remaining": 0.0}}
+    expected_trace = {
+        "0": [
+            {
+                "result": variable_wait,
+                "variables": variable_wait,
+            }
+        ],
+        "1": [{"result": {"event": "test_event", "event_data": {}}}],
+    }
+    assert_action_trace(expected_trace)
+
+
+@pytest.mark.parametrize(
+    "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}]
+)
+async def test_wait_template_matches_with_zero_timeout(
+    hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str
+) -> None:
+    """Test the wait template that matches with zero timeout option."""
+    event = "test_event"
+    events = async_capture_events(hass, event)
+    action = {"wait_template": "{{ states.switch.test.state == 'off' }}"}
+    action["timeout"] = timeout_param
+    action["continue_on_timeout"] = True
+    sequence = cv.SCRIPT_SCHEMA([action, {"event": event}])
+    sequence = await script.async_validate_actions_config(hass, sequence)
+    script_obj = script.Script(hass, sequence, "Test Name", "test_domain")
+    wait_started_flag = async_watch_for_action(script_obj, "wait")
+    hass.states.async_set("switch.test", "off")
+    hass.async_create_task(script_obj.async_run(context=Context()))
+
+    try:
+        await asyncio.wait_for(wait_started_flag.wait(), 1)
+    except (AssertionError, TimeoutError):
+        await script_obj.async_stop()
+        raise
+
+    assert not script_obj.is_running
+    assert len(events) == 1
+    assert "(timeout: 0:00:00)" in caplog.text
+    variable_wait = {"wait": {"completed": True, "remaining": 0.0}}
+    expected_trace = {
+        "0": [
+            {
+                "result": variable_wait,
+                "variables": variable_wait,
+            }
+        ],
+        "1": [{"result": {"event": "test_event", "event_data": {}}}],
+    }
+    assert_action_trace(expected_trace)
+
+
 @pytest.mark.parametrize(
     ("continue_on_timeout", "n_events"), [(False, 0), (True, 1), (None, 1)]
 )
@@ -1,6 +1,6 @@
 """Tests for the update coordinator."""

-from datetime import timedelta
+from datetime import datetime, timedelta
 import logging
 from unittest.mock import AsyncMock, Mock, patch
 import urllib.error
@@ -12,7 +12,7 @@ import requests

 from homeassistant import config_entries
 from homeassistant.const import EVENT_HOMEASSISTANT_STOP
-from homeassistant.core import CoreState, HomeAssistant
+from homeassistant.core import CoreState, HomeAssistant, callback
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import update_coordinator
 from homeassistant.util.dt import utcnow
@@ -716,3 +716,35 @@ async def test_always_callback_when_always_update_is_true(
     update_callback.reset_mock()

     remove_callbacks()
+
+
+async def test_timestamp_date_update_coordinator(hass: HomeAssistant) -> None:
+    """Test last_update_success_time is set before calling listeners."""
+    last_update_success_times: list[datetime | None] = []
+
+    async def refresh() -> int:
+        return 1
+
+    crd = update_coordinator.TimestampDataUpdateCoordinator[int](
+        hass,
+        _LOGGER,
+        name="test",
+        update_method=refresh,
+        update_interval=timedelta(seconds=10),
+    )
+
+    @callback
+    def listener():
+        last_update_success_times.append(crd.last_update_success_time)
+
+    unsub = crd.async_add_listener(listener)
+
+    await crd.async_refresh()
+
+    assert len(last_update_success_times) == 1
+    # Ensure the time is set before the listener is called
+    assert last_update_success_times != [None]
+
+    unsub()
+    await crd.async_refresh()
+    assert len(last_update_success_times) == 1