Mirror of https://github.com/home-assistant/core.git (synced 2025-07-22 12:47:08 +00:00)
2023.4.5 (#91544)
commit b05fcd7904

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/environment_canada",
   "iot_class": "cloud_polling",
   "loggers": ["env_canada"],
-  "requirements": ["env_canada==0.5.32"]
+  "requirements": ["env_canada==0.5.33"]
 }

@@ -95,6 +95,7 @@ HA_OPMODES_HVAC = {
     HVACMode.COOL: 2,
     HVACMode.AUTO: 3,
     HVACMode.FAN_ONLY: 6,
+    HVACMode.DRY: 8,
 }
 
 TARGET_TEMP_ACTIONS = (

@@ -1,5 +1,7 @@
 """The ONVIF integration."""
+from httpx import RequestError
 from onvif.exceptions import ONVIFAuthError, ONVIFError, ONVIFTimeoutError
+from zeep.exceptions import Fault
 
 from homeassistant.components.ffmpeg import CONF_EXTRA_ARGUMENTS
 from homeassistant.components.stream import CONF_RTSP_TRANSPORT, RTSP_TRANSPORTS

@@ -27,9 +29,25 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
 
     device = ONVIFDevice(hass, entry)
 
-    if not await device.async_setup():
+    try:
+        await device.async_setup()
+    except RequestError as err:
         await device.device.close()
-        return False
+        raise ConfigEntryNotReady(
+            f"Could not connect to camera {device.device.host}:{device.device.port}: {err}"
+        ) from err
+    except Fault as err:
+        await device.device.close()
+        # We do no know if the credentials are wrong or the camera is
+        # still booting up, so we will retry later
+        raise ConfigEntryNotReady(
+            f"Could not connect to camera, verify credentials are correct: {err}"
+        ) from err
+    except ONVIFError as err:
+        await device.device.close()
+        raise ConfigEntryNotReady(
+            f"Could not setup camera {device.device.host}:{device.device.port}: {err}"
+        ) from err
 
     if not device.available:
         raise ConfigEntryNotReady()

@@ -39,15 +57,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
 
     hass.data[DOMAIN][entry.unique_id] = device
 
-    platforms = [Platform.BUTTON, Platform.CAMERA]
+    device.platforms = [Platform.BUTTON, Platform.CAMERA]
 
     if device.capabilities.events:
-        platforms += [Platform.BINARY_SENSOR, Platform.SENSOR]
+        device.platforms += [Platform.BINARY_SENSOR, Platform.SENSOR]
 
     if device.capabilities.imaging:
-        platforms += [Platform.SWITCH]
+        device.platforms += [Platform.SWITCH]
 
-    await hass.config_entries.async_forward_entry_setups(entry, platforms)
+    await hass.config_entries.async_forward_entry_setups(entry, device.platforms)
 
     entry.async_on_unload(
         hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, device.async_stop)

@@ -59,16 +77,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
 
-    device = hass.data[DOMAIN][entry.unique_id]
-    platforms = ["camera"]
+    device: ONVIFDevice = hass.data[DOMAIN][entry.unique_id]
 
     if device.capabilities.events and device.events.started:
-        platforms += [Platform.BINARY_SENSOR, Platform.SENSOR]
         await device.events.async_stop()
-    if device.capabilities.imaging:
-        platforms += [Platform.SWITCH]
 
-    return await hass.config_entries.async_unload_platforms(entry, platforms)
+    return await hass.config_entries.async_unload_platforms(entry, device.platforms)
 
 
 async def _get_snapshot_auth(device):

@@ -20,6 +20,7 @@ from homeassistant.const import (
     CONF_PASSWORD,
     CONF_PORT,
     CONF_USERNAME,
+    Platform,
 )
 from homeassistant.core import HomeAssistant
 import homeassistant.util.dt as dt_util

@@ -55,6 +56,7 @@ class ONVIFDevice:
         self.capabilities: Capabilities = Capabilities()
         self.profiles: list[Profile] = []
         self.max_resolution: int = 0
+        self.platforms: list[Platform] = []
 
         self._dt_diff_seconds: float = 0
 

@@ -83,7 +85,7 @@ class ONVIFDevice:
         """Return the password of this device."""
         return self.config_entry.data[CONF_PASSWORD]
 
-    async def async_setup(self) -> bool:
+    async def async_setup(self) -> None:
         """Set up the device."""
         self.device = get_device(
             self.hass,

@@ -94,15 +96,12 @@ class ONVIFDevice:
         )
 
         # Get all device info
-        try:
-            await self.device.update_xaddrs()
-            await self.async_check_date_and_time()
+        await self.device.update_xaddrs()
+        await self.async_check_date_and_time()
 
-            # Create event manager
-            self.events = EventManager(
-                self.hass, self.device, self.config_entry.unique_id
-            )
+        # Create event manager
+        assert self.config_entry.unique_id
+        self.events = EventManager(self.hass, self.device, self.config_entry.unique_id)
 
-            # Fetch basic device info and capabilities
-            self.info = await self.async_get_device_info()
+        # Fetch basic device info and capabilities
+        self.info = await self.async_get_device_info()

@@ -114,7 +113,7 @@ class ONVIFDevice:
 
         # No camera profiles to add
         if not self.profiles:
-            return False
+            raise ONVIFError("No camera profiles found")
 
         if self.capabilities.ptz:
             self.device.create_ptz_service()

@@ -125,26 +124,6 @@ class ONVIFDevice:
             for profile in self.profiles
             if profile.video.encoding == "H264"
         )
-        except RequestError as err:
-            LOGGER.warning(
-                "Couldn't connect to camera '%s', but will retry later. Error: %s",
-                self.name,
-                err,
-            )
-            self.available = False
-            await self.device.close()
-        except Fault as err:
-            LOGGER.error(
-                (
-                    "Couldn't connect to camera '%s', please verify "
-                    "that the credentials are correct. Error: %s"
-                ),
-                self.name,
-                err,
-            )
-            return False
-
-        return True
 
     async def async_stop(self, event=None):
         """Shut it all down."""

@@ -20,11 +20,9 @@ from .models import Event
 from .parsers import PARSERS
 
 UNHANDLED_TOPICS: set[str] = set()
-SUBSCRIPTION_ERRORS = (
-    Fault,
-    asyncio.TimeoutError,
-    TransportError,
-)
+
+SUBSCRIPTION_ERRORS = (Fault, asyncio.TimeoutError, TransportError)
+SET_SYNCHRONIZATION_POINT_ERRORS = (*SUBSCRIPTION_ERRORS, TypeError)
 
 
 def _stringify_onvif_error(error: Exception) -> str:

@@ -34,6 +32,15 @@ def _stringify_onvif_error(error: Exception) -> str:
     return str(error)
 
 
+def _get_next_termination_time() -> str:
+    """Get next termination time."""
+    return (
+        (dt_util.utcnow() + dt.timedelta(days=1))
+        .isoformat(timespec="seconds")
+        .replace("+00:00", "Z")
+    )
+
+
 class EventManager:
     """ONVIF Event Manager."""
 

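For reference, a self-contained, stdlib-only sketch of the computation the helper above performs (only the Home Assistant `dt_util` wrapper is swapped for `datetime`): it yields an ISO 8601 UTC timestamp one day ahead, with the `+00:00` offset rewritten to the `Z` form that the camera's subscription API expects.

import datetime as dt

def get_next_termination_time() -> str:
    """Sketch of _get_next_termination_time using only the stdlib."""
    return (
        (dt.datetime.now(dt.timezone.utc) + dt.timedelta(days=1))
        .isoformat(timespec="seconds")
        .replace("+00:00", "Z")
    )

print(get_next_termination_time())  # e.g. "2023-04-22T12:00:00Z"
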
@@ -86,7 +93,9 @@ class EventManager:
 
     async def async_start(self) -> bool:
         """Start polling events."""
-        if not await self.device.create_pullpoint_subscription():
+        if not await self.device.create_pullpoint_subscription(
+            {"InitialTerminationTime": _get_next_termination_time()}
+        ):
             return False
 
         # Create subscription manager

@@ -99,7 +108,7 @@ class EventManager:
 
         # Initialize events
         pullpoint = self.device.create_pullpoint_service()
-        with suppress(*SUBSCRIPTION_ERRORS):
+        with suppress(*SET_SYNCHRONIZATION_POINT_ERRORS):
             await pullpoint.SetSynchronizationPoint()
         response = await pullpoint.PullMessages(
             {"MessageLimit": 100, "Timeout": dt.timedelta(seconds=5)}

@@ -173,16 +182,11 @@ class EventManager:
         if not self._subscription:
             return
 
-        termination_time = (
-            (dt_util.utcnow() + dt.timedelta(days=1))
-            .isoformat(timespec="seconds")
-            .replace("+00:00", "Z")
-        )
         with suppress(*SUBSCRIPTION_ERRORS):
-            await self._subscription.Renew(termination_time)
+            # The first time we renew, we may get a Fault error so we
+            # suppress it. The subscription will be restarted in
+            # async_restart later.
+            await self._subscription.Renew(_get_next_termination_time())
 
     def async_schedule_pull(self) -> None:
         """Schedule async_pull_messages to run."""

@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/onvif",
   "iot_class": "local_push",
   "loggers": ["onvif", "wsdiscovery", "zeep"],
-  "requirements": ["onvif-zeep-async==1.2.3", "WSDiscovery==2.0.0"]
+  "requirements": ["onvif-zeep-async==1.2.11", "WSDiscovery==2.0.0"]
 }

@@ -357,34 +357,77 @@ class ReolinkHost:
 
     async def handle_webhook(
         self, hass: HomeAssistant, webhook_id: str, request: Request
-    ):
-        """Shield the incoming webhook callback from cancellation."""
-        await asyncio.shield(self.handle_webhook_shielded(hass, webhook_id, request))
-
-    async def handle_webhook_shielded(
-        self, hass: HomeAssistant, webhook_id: str, request: Request
-    ):
-        """Handle incoming webhook from Reolink for inbound messages and calls."""
+    ) -> None:
+        """Read the incoming webhook from Reolink for inbound messages and schedule processing."""
         _LOGGER.debug("Webhook '%s' called", webhook_id)
-
-        if not self._webhook_reachable.is_set():
-            self._webhook_reachable.set()
-
-        if not request.body_exists:
-            _LOGGER.debug("Webhook '%s' triggered without payload", webhook_id)
-            return
-
-        data = await request.text()
-        if not data:
-            _LOGGER.debug(
-                "Webhook '%s' triggered with unknown payload: %s", webhook_id, data
-            )
-            return
-
-        channels = await self._api.ONVIF_event_callback(data)
-
-        if channels is None:
-            async_dispatcher_send(hass, f"{webhook_id}_all", {})
-        else:
-            for channel in channels:
-                async_dispatcher_send(hass, f"{webhook_id}_{channel}", {})
+        data: bytes | None = None
+        try:
+            data = await request.read()
+            if not data:
+                _LOGGER.debug(
+                    "Webhook '%s' triggered with unknown payload: %s", webhook_id, data
+                )
+        except ConnectionResetError:
+            _LOGGER.debug(
+                "Webhook '%s' called, but lost connection before reading message "
+                "(ConnectionResetError), issuing poll",
+                webhook_id,
+            )
+            return
+        except aiohttp.ClientResponseError:
+            _LOGGER.debug(
+                "Webhook '%s' called, but could not read the message, issuing poll",
+                webhook_id,
+            )
+            return
+        except asyncio.CancelledError:
+            _LOGGER.debug(
+                "Webhook '%s' called, but lost connection before reading message "
+                "(CancelledError), issuing poll",
+                webhook_id,
+            )
+            raise
+        finally:
+            # We want handle_webhook to return as soon as possible
+            # so we process the data in the background, this also shields from cancellation
+            hass.async_create_background_task(
+                self._process_webhook_data(hass, webhook_id, data),
+                "Process Reolink webhook",
+            )
+
+    async def _process_webhook_data(
+        self, hass: HomeAssistant, webhook_id: str, data: bytes | None
+    ) -> None:
+        """Process the data from the Reolink webhook."""
+        # This task is executed in the background so we need to catch exceptions
+        # and log them
+        if not self._webhook_reachable.is_set():
+            self._webhook_reachable.set()
+            ir.async_delete_issue(self._hass, DOMAIN, "webhook_url")
+
+        try:
+            if not data:
+                if not await self._api.get_motion_state_all_ch():
+                    _LOGGER.error(
+                        "Could not poll motion state after losing connection during receiving ONVIF event"
+                    )
+                    return
+                async_dispatcher_send(hass, f"{webhook_id}_all", {})
+                return
+
+            message = data.decode("utf-8")
+            channels = await self._api.ONVIF_event_callback(message)
+        except Exception as ex:  # pylint: disable=broad-except
+            _LOGGER.exception(
+                "Error processing ONVIF event for Reolink %s: %s",
+                self._api.nvr_name,
+                ex,
+            )
+            return
+
+        if channels is None:
+            async_dispatcher_send(hass, f"{webhook_id}_all", {})
+            return
+
+        for channel in channels:
+            async_dispatcher_send(hass, f"{webhook_id}_{channel}", {})

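The shape of this change: the old handler shielded all processing inside the request, while the new one only reads the body and hands processing to a background task. A stripped-down sketch of that pattern in plain asyncio (the names below are stand-ins, not the Reolink or Home Assistant API):

import asyncio

async def process_webhook_data(data: bytes | None) -> None:
    """Stand-in for the real processing; runs outside the request handler."""
    await asyncio.sleep(0)
    print("processed", data)

async def handle_webhook(request_body: bytes | None) -> None:
    """Return quickly: capture the payload, then process in the background."""
    data: bytes | None = None
    try:
        data = request_body  # stand-in for `await request.read()`
    finally:
        # Scheduling (not awaiting) the task keeps the webhook response fast,
        # and a separate task is not torn down if this handler is cancelled.
        asyncio.get_running_loop().create_task(process_webhook_data(data))

async def main() -> None:
    await handle_webhook(b"<xml/>")
    await asyncio.sleep(0.01)  # give the background task time to finish

asyncio.run(main())
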
@@ -2,6 +2,7 @@
 from __future__ import annotations
 
 import asyncio
+from datetime import datetime, timedelta
 
 from async_timeout import timeout
 from sharkiq import (

@@ -60,6 +61,13 @@ class SharkIqUpdateCoordinator(DataUpdateCoordinator[bool]):
     async def _async_update_data(self) -> bool:
         """Update data device by device."""
         try:
+            if self.ayla_api.token_expiring_soon:
+                await self.ayla_api.async_refresh_auth()
+            elif datetime.now() > self.ayla_api.auth_expiration - timedelta(
+                seconds=600
+            ):
+                await self.ayla_api.async_refresh_auth()
+
             all_vacuums = await self.ayla_api.async_list_devices()
             self._online_dsns = {
                 v["dsn"]

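Condensed, the new logic refreshes the auth token when the library flags it as expiring soon, or when the recorded expiration is less than 600 seconds away. A self-contained sketch of just that decision (the names mirror the diff; the 600-second margin is the value shown above):

from datetime import datetime, timedelta

def should_refresh(token_expiring_soon: bool, auth_expiration: datetime) -> bool:
    """Refresh if the API flags it, or within 600 s of expiration."""
    if token_expiring_soon:
        return True
    return datetime.now() > auth_expiration - timedelta(seconds=600)

# Token expires in 5 minutes -> inside the 600 s window -> refresh.
print(should_refresh(False, datetime.now() + timedelta(seconds=300)))  # True
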
@@ -78,7 +86,7 @@ class SharkIqUpdateCoordinator(DataUpdateCoordinator[bool]):
             LOGGER.debug("Bad auth state. Attempting re-auth", exc_info=err)
             raise ConfigEntryAuthFailed from err
         except Exception as err:
-            LOGGER.exception("Unexpected error updating SharkIQ")
+            LOGGER.exception("Unexpected error updating SharkIQ. Attempting re-auth")
             raise UpdateFailed(err) from err
 
         return True

@@ -40,5 +40,5 @@
   "documentation": "https://www.home-assistant.io/integrations/switchbot",
   "iot_class": "local_push",
   "loggers": ["switchbot"],
-  "requirements": ["PySwitchbot==0.37.5"]
+  "requirements": ["PySwitchbot==0.37.6"]
 }

@@ -188,7 +188,10 @@ class SystemBridgeDataUpdateCoordinator(
             session=async_get_clientsession(self.hass),
         )
 
-        self.hass.async_create_task(self._listen_for_data())
+        self.hass.async_create_background_task(
+            self._listen_for_data(),
+            name="System Bridge WebSocket Listener",
+        )
 
         await self.websocket_client.register_data_listener(
             RegisterDataListener(modules=MODULES)

@@ -41,7 +41,7 @@ set_water_heater_timer:
   target:
     entity:
       integration: tado
-      domain: climate
+      domain: water_heater
   fields:
     time_period:
       name: Time period

@@ -82,7 +82,7 @@ class DatasetStore:
         """Initialize the dataset store."""
         self.hass = hass
         self.datasets: dict[str, DatasetEntry] = {}
-        self.preferred_dataset: str | None = None
+        self._preferred_dataset: str | None = None
         self._store: Store[dict[str, Any]] = Store(
             hass,
             STORAGE_VERSION_MAJOR,

@@ -103,14 +103,14 @@ class DatasetStore:
         entry = DatasetEntry(source=source, tlv=tlv)
         self.datasets[entry.id] = entry
         # Set to preferred if there is no preferred dataset
-        if self.preferred_dataset is None:
-            self.preferred_dataset = entry.id
+        if self._preferred_dataset is None:
+            self._preferred_dataset = entry.id
         self.async_schedule_save()
 
     @callback
     def async_delete(self, dataset_id: str) -> None:
         """Delete dataset."""
-        if self.preferred_dataset == dataset_id:
+        if self._preferred_dataset == dataset_id:
             raise DatasetPreferredError("attempt to remove preferred dataset")
         del self.datasets[dataset_id]
         self.async_schedule_save()

@@ -120,6 +120,21 @@ class DatasetStore:
         """Get dataset by id."""
         return self.datasets.get(dataset_id)
 
+    @property
+    @callback
+    def preferred_dataset(self) -> str | None:
+        """Get the id of the preferred dataset."""
+        return self._preferred_dataset
+
+    @preferred_dataset.setter
+    @callback
+    def preferred_dataset(self, dataset_id: str) -> None:
+        """Set the preferred dataset."""
+        if dataset_id not in self.datasets:
+            raise KeyError("unknown dataset")
+        self._preferred_dataset = dataset_id
+        self.async_schedule_save()
+
     async def async_load(self) -> None:
         """Load the datasets."""
         data = await self._store.async_load()

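In isolation, the effect of the new property pair: reads behave as before, while writes now validate the id before persisting. A minimal sketch of the pattern (Store and callback plumbing omitted); this is also what lets the websocket handler further down simply assign and catch KeyError instead of pre-checking:

class DatasetStoreSketch:
    """Sketch of the validated preferred_dataset property from the diff."""

    def __init__(self) -> None:
        self.datasets: dict[str, object] = {"abc": object()}
        self._preferred_dataset: str | None = None

    @property
    def preferred_dataset(self) -> str | None:
        """Get the id of the preferred dataset."""
        return self._preferred_dataset

    @preferred_dataset.setter
    def preferred_dataset(self, dataset_id: str) -> None:
        """Set the preferred dataset, rejecting unknown ids."""
        if dataset_id not in self.datasets:
            raise KeyError("unknown dataset")
        self._preferred_dataset = dataset_id

store = DatasetStoreSketch()
store.preferred_dataset = "abc"     # ok
print(store.preferred_dataset)      # abc
# store.preferred_dataset = "nope"  # would raise KeyError("unknown dataset")
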
@@ -139,7 +154,7 @@ class DatasetStore:
         preferred_dataset = data["preferred_dataset"]
 
         self.datasets = datasets
-        self.preferred_dataset = preferred_dataset
+        self._preferred_dataset = preferred_dataset
 
     @callback
     def async_schedule_save(self) -> None:

@@ -151,7 +166,7 @@ class DatasetStore:
         """Return data of datasets to store in a file."""
         data: dict[str, Any] = {}
         data["datasets"] = [dataset.to_json() for dataset in self.datasets.values()]
-        data["preferred_dataset"] = self.preferred_dataset
+        data["preferred_dataset"] = self._preferred_dataset
         return data
 
 

@@ -65,13 +65,14 @@ async def ws_set_preferred_dataset(
     dataset_id = msg["dataset_id"]
 
     store = await dataset_store.async_get_store(hass)
-    if not (store.async_get(dataset_id)):
+    try:
+        store.preferred_dataset = dataset_id
+    except KeyError:
         connection.send_error(
             msg["id"], websocket_api.const.ERR_NOT_FOUND, "unknown dataset"
         )
         return
 
-    store.preferred_dataset = dataset_id
     connection.send_result(msg["id"])
 
 

@@ -3,6 +3,7 @@ from __future__ import annotations
 
 from collections.abc import Mapping
 import logging
+from pathlib import Path
 from typing import Any
 
 from aiohttp import CookieJar

@@ -28,6 +29,7 @@ from homeassistant.helpers.aiohttp_client import (
     async_create_clientsession,
     async_get_clientsession,
 )
+from homeassistant.helpers.storage import STORAGE_DIR
 from homeassistant.helpers.typing import DiscoveryInfoType
 from homeassistant.loader import async_get_integration
 from homeassistant.util.network import is_ip_address

@@ -248,6 +250,7 @@ class ProtectFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
             username=user_input[CONF_USERNAME],
             password=user_input[CONF_PASSWORD],
             verify_ssl=verify_ssl,
+            cache_dir=Path(self.hass.config.path(STORAGE_DIR, "unifiprotect_cache")),
         )
 
         errors = {}

@@ -41,7 +41,7 @@
   "iot_class": "local_push",
   "loggers": ["pyunifiprotect", "unifi_discovery"],
   "quality_scale": "platinum",
-  "requirements": ["pyunifiprotect==4.7.0", "unifi-discovery==1.1.7"],
+  "requirements": ["pyunifiprotect==4.8.1", "unifi-discovery==1.1.7"],
   "ssdp": [
     {
       "manufacturer": "Ubiquiti Networks",

@@ -4,6 +4,7 @@ from __future__ import annotations
 from collections.abc import Generator, Iterable
 import contextlib
 from enum import Enum
+from pathlib import Path
 import socket
 from typing import Any
 

@@ -27,6 +28,7 @@ from homeassistant.const import (
 )
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.aiohttp_client import async_create_clientsession
+from homeassistant.helpers.storage import STORAGE_DIR
 
 from .const import (
     CONF_ALL_UPDATES,

@@ -142,4 +144,5 @@ def async_create_api_client(
         override_connection_host=entry.options.get(CONF_OVERRIDE_CHOST, False),
         ignore_stats=not entry.options.get(CONF_ALL_UPDATES, False),
         ignore_unadopted=False,
+        cache_dir=Path(hass.config.path(STORAGE_DIR, "unifiprotect_cache")),
     )

@@ -49,8 +49,8 @@ _LOGGER = logging.getLogger(__name__)
 class AttrReportConfig(TypedDict, total=True):
     """Configuration to report for the attributes."""
 
-    # Could be either an attribute name or attribute id
-    attr: str | int
+    # An attribute name
+    attr: str
     # The config for the attribute reporting configuration consists of a tuple for
     # (minimum_reported_time_interval_s, maximum_reported_time_interval_s, value_delta)
     config: tuple[int, int, int | float]

@@ -130,15 +130,13 @@ class ZigbeeChannel(LogMixin):
         unique_id = ch_pool.unique_id.replace("-", ":")
         self._unique_id = f"{unique_id}:0x{cluster.cluster_id:04x}"
         if not hasattr(self, "_value_attribute") and self.REPORT_CONFIG:
-            attr = self.REPORT_CONFIG[0].get("attr")
-            if isinstance(attr, str):
-                attribute: ZCLAttributeDef = self.cluster.attributes_by_name.get(attr)
-                if attribute is not None:
-                    self.value_attribute = attribute.id
-                else:
-                    self.value_attribute = None
-            else:
-                self.value_attribute = attr
+            attr_def: ZCLAttributeDef | None = self.cluster.attributes_by_name.get(
+                self.REPORT_CONFIG[0]["attr"]
+            )
+            if attr_def is not None:
+                self.value_attribute = attr_def.id
+            else:
+                self.value_attribute = None
         self._status = ChannelStatus.CREATED
         self._cluster.add_listener(self)
         self.data_cache: dict[str, Enum] = {}

@@ -233,7 +231,12 @@ class ZigbeeChannel(LogMixin):
 
         for attr_report in self.REPORT_CONFIG:
             attr, config = attr_report["attr"], attr_report["config"]
-            attr_name = self.cluster.attributes.get(attr, [attr])[0]
+
+            try:
+                attr_name = self.cluster.find_attribute(attr).name
+            except KeyError:
+                attr_name = attr
+
             event_data[attr_name] = {
                 "min": config[0],
                 "max": config[1],

@@ -282,7 +285,7 @@ class ZigbeeChannel(LogMixin):
         )
 
     def _configure_reporting_status(
-        self, attrs: dict[int | str, tuple[int, int, float | int]], res: list | tuple
+        self, attrs: dict[str, tuple[int, int, float | int]], res: list | tuple
     ) -> None:
         """Parse configure reporting result."""
         if isinstance(res, (Exception, ConfigureReportingResponseRecord)):

@@ -304,14 +307,14 @@ class ZigbeeChannel(LogMixin):
             return
 
         failed = [
-            self.cluster.attributes.get(r.attrid, [r.attrid])[0]
-            for r in res
-            if r.status != Status.SUCCESS
+            self.cluster.find_attribute(record.attrid).name
+            for record in res
+            if record.status != Status.SUCCESS
         ]
-        attributes = {self.cluster.attributes.get(r, [r])[0] for r in attrs}
 
         self.debug(
             "Successfully configured reporting for '%s' on '%s' cluster",
-            attributes - set(failed),
+            set(attrs) - set(failed),
             self.name,
         )
         self.debug(

@@ -8,7 +8,7 @@ from .backports.enum import StrEnum
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2023
 MINOR_VERSION: Final = 4
-PATCH_VERSION: Final = "4"
+PATCH_VERSION: Final = "5"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "homeassistant"
-version = "2023.4.4"
+version = "2023.4.5"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
 readme = "README.rst"

@@ -40,7 +40,7 @@ PyRMVtransport==0.3.3
 PySocks==1.7.1
 
 # homeassistant.components.switchbot
-PySwitchbot==0.37.5
+PySwitchbot==0.37.6
 
 # homeassistant.components.transport_nsw
 PyTransportNSW==0.1.1

@@ -661,7 +661,7 @@ enocean==0.50
 enturclient==0.2.4
 
 # homeassistant.components.environment_canada
-env_canada==0.5.32
+env_canada==0.5.33
 
 # homeassistant.components.enphase_envoy
 envoy_reader==0.20.1

@@ -1260,7 +1260,7 @@ ondilo==0.2.0
 onkyo-eiscp==1.2.7
 
 # homeassistant.components.onvif
-onvif-zeep-async==1.2.3
+onvif-zeep-async==1.2.11
 
 # homeassistant.components.opengarage
 open-garage==0.2.0

@@ -2150,7 +2150,7 @@ pytrafikverket==0.2.3
 pyudev==0.23.2
 
 # homeassistant.components.unifiprotect
-pyunifiprotect==4.7.0
+pyunifiprotect==4.8.1
 
 # homeassistant.components.uptimerobot
 pyuptimerobot==22.2.0

@@ -36,7 +36,7 @@ PyRMVtransport==0.3.3
 PySocks==1.7.1
 
 # homeassistant.components.switchbot
-PySwitchbot==0.37.5
+PySwitchbot==0.37.6
 
 # homeassistant.components.transport_nsw
 PyTransportNSW==0.1.1

@@ -517,7 +517,7 @@ energyzero==0.4.1
 enocean==0.50
 
 # homeassistant.components.environment_canada
-env_canada==0.5.32
+env_canada==0.5.33
 
 # homeassistant.components.enphase_envoy
 envoy_reader==0.20.1

@@ -938,7 +938,7 @@ omnilogic==0.4.5
 ondilo==0.2.0
 
 # homeassistant.components.onvif
-onvif-zeep-async==1.2.3
+onvif-zeep-async==1.2.11
 
 # homeassistant.components.opengarage
 open-garage==0.2.0

@@ -1540,7 +1540,7 @@ pytrafikverket==0.2.3
 pyudev==0.23.2
 
 # homeassistant.components.unifiprotect
-pyunifiprotect==4.7.0
+pyunifiprotect==4.8.1
 
 # homeassistant.components.uptimerobot
 pyuptimerobot==22.2.0

@@ -3,6 +3,7 @@ from __future__ import annotations
 
 from collections.abc import Iterable
 from copy import deepcopy
+from datetime import datetime, timedelta
 import enum
 from typing import Any
 from unittest.mock import patch

@@ -72,9 +73,17 @@ EXPECTED_FEATURES = (
 class MockAyla(AylaApi):
     """Mocked AylaApi that doesn't do anything."""
 
+    desired_expiry = False
+
     async def async_sign_in(self):
         """Instead of signing in, just return."""
 
+    async def async_refresh_auth(self):
+        """Instead of refreshing auth, just return."""
+
+    async def async_sign_out(self):
+        """Instead of signing out, just return."""
+
     async def async_list_devices(self) -> list[dict]:
         """Return the device list."""
         return [SHARK_DEVICE_DICT]

@@ -89,6 +98,18 @@ class MockAyla(AylaApi):
     async def async_request(self, http_method: str, url: str, **kwargs):
         """Don't make an HTTP request."""
 
+    @property
+    def token_expiring_soon(self) -> bool:
+        """Toggling Property for Token Expiration Flag."""
+        # Alternate expiry flag for each test
+        self.desired_expiry = not self.desired_expiry
+        return self.desired_expiry
+
+    @property
+    def auth_expiration(self) -> datetime:
+        """Sample expiration timestamp that is always 1200 seconds behind now()."""
+        return datetime.now() - timedelta(seconds=1200)
+
 
 class MockShark(SharkIqVacuum):
     """Mocked SharkIqVacuum that won't hit the API."""

@@ -7,6 +7,8 @@ from datetime import datetime, timedelta
 from functools import partial
 from ipaddress import IPv4Address
 import json
+from pathlib import Path
+from tempfile import gettempdir
 from typing import Any
 from unittest.mock import AsyncMock, Mock, patch
 

@@ -105,6 +107,7 @@ def mock_ufp_client(bootstrap: Bootstrap):
     client.bootstrap = bootstrap
     client._bootstrap = bootstrap
     client.api_path = "/api"
+    client.cache_dir = Path(gettempdir()) / "ufp_cache"
     # functionality from API client tests actually need
     client._stream_response = partial(ProtectApiClient._stream_response, client)
     client.get_camera_video = partial(ProtectApiClient.get_camera_video, client)

@@ -5,9 +5,12 @@ from unittest import mock
 from unittest.mock import AsyncMock, patch
 
 import pytest
+import zigpy.endpoint
 import zigpy.profiles.zha
 import zigpy.types as t
+from zigpy.zcl import foundation
 import zigpy.zcl.clusters
+import zigpy.zdo.types as zdo_t
 
 import homeassistant.components.zha.core.channels as zha_channels
 import homeassistant.components.zha.core.channels.base as base_channels

@@ -726,3 +729,56 @@ async def test_cluster_no_ep_attribute(m1, zha_device_mock) -> None:
     pools = {pool.id: pool for pool in channels.pools}
     assert "1:0x042e" in pools[1].all_channels
     assert pools[1].all_channels["1:0x042e"].name
+
+
+async def test_configure_reporting(hass: HomeAssistant) -> None:
+    """Test setting up a channel and configuring attribute reporting in two batches."""
+
+    class TestZigbeeChannel(base_channels.ZigbeeChannel):
+        BIND = True
+        REPORT_CONFIG = (
+            # By name
+            base_channels.AttrReportConfig(attr="current_x", config=(1, 60, 1)),
+            base_channels.AttrReportConfig(attr="current_hue", config=(1, 60, 2)),
+            base_channels.AttrReportConfig(attr="color_temperature", config=(1, 60, 3)),
+            base_channels.AttrReportConfig(attr="current_y", config=(1, 60, 4)),
+        )
+
+    mock_ep = mock.AsyncMock(spec_set=zigpy.endpoint.Endpoint)
+    mock_ep.device.zdo = AsyncMock()
+
+    cluster = zigpy.zcl.clusters.lighting.Color(mock_ep)
+    cluster.bind = AsyncMock(
+        spec_set=cluster.bind,
+        return_value=[zdo_t.Status.SUCCESS],  # ZDOCmd.Bind_rsp
+    )
+    cluster.configure_reporting_multiple = AsyncMock(
+        spec_set=cluster.configure_reporting_multiple,
+        return_value=[
+            foundation.ConfigureReportingResponseRecord(
+                status=foundation.Status.SUCCESS
+            )
+        ],
+    )
+
+    ch_pool = mock.AsyncMock(spec_set=zha_channels.ChannelPool)
+    ch_pool.skip_configuration = False
+
+    channel = TestZigbeeChannel(cluster, ch_pool)
+    await channel.async_configure()
+
+    # Since we request reporting for five attributes, we need to make two calls (3 + 1)
+    assert cluster.configure_reporting_multiple.mock_calls == [
+        mock.call(
+            {
+                "current_x": (1, 60, 1),
+                "current_hue": (1, 60, 2),
+                "color_temperature": (1, 60, 3),
+            }
+        ),
+        mock.call(
+            {
+                "current_y": (1, 60, 4),
+            }
+        ),
+    ]