Paulus Schoutsen 2023-05-05 15:23:51 -04:00 committed by GitHub
commit e904edb12e
37 changed files with 590 additions and 209 deletions

View File

@ -3,7 +3,7 @@ from abc import ABC, abstractmethod
import asyncio
import logging
from homeassistant.core import CALLBACK_TYPE, callback
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.helpers.storage import Store
from .const import DOMAIN
@ -19,7 +19,7 @@ class AbstractConfig(ABC):
_unsub_proactive_report: asyncio.Task[CALLBACK_TYPE] | None = None
def __init__(self, hass):
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize abstract config."""
self.hass = hass
self._store = None

View File

@ -199,14 +199,10 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
# Don't migrate if there's a YAML config
return
for state in self.hass.states.async_all():
async_expose_entity(
self.hass,
CLOUD_ALEXA,
state.entity_id,
self._should_expose_legacy(state.entity_id),
)
for entity_id in self._prefs.alexa_entity_configs:
for entity_id in {
*self.hass.states.async_entity_ids(),
*self._prefs.alexa_entity_configs,
}:
async_expose_entity(
self.hass,
CLOUD_ALEXA,
@ -220,8 +216,18 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
async def on_hass_started(hass):
if self._prefs.alexa_settings_version != ALEXA_SETTINGS_VERSION:
if self._prefs.alexa_settings_version < 2:
if self._prefs.alexa_settings_version < 2 or (
# Recover from a bug we had in 2023.5.0 where entities didn't get exposed
self._prefs.alexa_settings_version < 3
and not any(
settings.get("should_expose", False)
for settings in async_get_assistant_settings(
hass, CLOUD_ALEXA
).values()
)
):
self._migrate_alexa_entity_settings_v1()
await self._prefs.async_update(
alexa_settings_version=ALEXA_SETTINGS_VERSION
)
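The recovery condition added above (and mirrored in the Google config later in this commit) re-runs the v1 expose migration when the stored settings version is 2 but nothing is exposed to the assistant, which is the symptom of the 2023.5.0 bug. A minimal standalone sketch of that decision, assuming async_get_assistant_settings returns a mapping of entity IDs to option dicts as the call above implies; the helper name needs_reexpose_migration is invented for illustration:

def needs_reexpose_migration(stored_version: int, settings: dict[str, dict]) -> bool:
    """Return True if the v1 expose migration should be run (again)."""
    if stored_version < 2:
        # Settings predate the exposed_entities store: always migrate.
        return True
    # 2023.5.0 bug: the v2 migration ran but left no entity exposed, so re-run it.
    return stored_version < 3 and not any(
        options.get("should_expose", False) for options in settings.values()
    )

# The three interesting cases:
assert needs_reexpose_migration(1, {}) is True
assert needs_reexpose_migration(2, {"light.kitchen": {"should_expose": False}}) is True
assert needs_reexpose_migration(2, {"light.kitchen": {"should_expose": True}}) is False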

View File

@ -12,6 +12,7 @@ from homeassistant.components.google_assistant import DOMAIN as GOOGLE_DOMAIN
from homeassistant.components.google_assistant.helpers import AbstractConfig
from homeassistant.components.homeassistant.exposed_entities import (
async_expose_entity,
async_get_assistant_settings,
async_get_entity_settings,
async_listen_entity_updates,
async_set_assistant_option,
@ -175,23 +176,10 @@ class CloudGoogleConfig(AbstractConfig):
# Don't migrate if there's a YAML config
return
for state in self.hass.states.async_all():
entity_id = state.entity_id
async_expose_entity(
self.hass,
CLOUD_GOOGLE,
entity_id,
self._should_expose_legacy(entity_id),
)
if _2fa_disabled := (self._2fa_disabled_legacy(entity_id) is not None):
async_set_assistant_option(
self.hass,
CLOUD_GOOGLE,
entity_id,
PREF_DISABLE_2FA,
_2fa_disabled,
)
for entity_id in self._prefs.google_entity_configs:
for entity_id in {
*self.hass.states.async_entity_ids(),
*self._prefs.google_entity_configs,
}:
async_expose_entity(
self.hass,
CLOUD_GOOGLE,
@ -213,8 +201,18 @@ class CloudGoogleConfig(AbstractConfig):
async def on_hass_started(hass: HomeAssistant) -> None:
if self._prefs.google_settings_version != GOOGLE_SETTINGS_VERSION:
if self._prefs.google_settings_version < 2:
if self._prefs.google_settings_version < 2 or (
# Recover from a bug we had in 2023.5.0 where entities didn't get exposed
self._prefs.google_settings_version < 3
and not any(
settings.get("should_expose", False)
for settings in async_get_assistant_settings(
hass, CLOUD_GOOGLE
).values()
)
):
self._migrate_google_entity_settings_v1()
await self._prefs.async_update(
google_settings_version=GOOGLE_SETTINGS_VERSION
)

View File

@ -41,8 +41,8 @@ STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 2
ALEXA_SETTINGS_VERSION = 2
GOOGLE_SETTINGS_VERSION = 2
ALEXA_SETTINGS_VERSION = 3
GOOGLE_SETTINGS_VERSION = 3
class CloudPreferencesStore(Store):

View File

@ -15,5 +15,5 @@
"documentation": "https://www.home-assistant.io/integrations/elkm1",
"iot_class": "local_push",
"loggers": ["elkm1_lib"],
"requirements": ["elkm1-lib==2.2.1"]
"requirements": ["elkm1-lib==2.2.2"]
}

View File

@ -25,6 +25,7 @@ from aioesphomeapi import (
NumberInfo,
SelectInfo,
SensorInfo,
SensorState,
SwitchInfo,
TextSensorInfo,
UserService,
@ -240,9 +241,18 @@ class RuntimeEntryData:
current_state_by_type = self.state[state_type]
current_state = current_state_by_type.get(key, _SENTINEL)
subscription_key = (state_type, key)
if current_state == state and subscription_key not in stale_state:
if (
current_state == state
and subscription_key not in stale_state
and not (
type(state) is SensorState # pylint: disable=unidiomatic-typecheck
and (platform_info := self.info.get(Platform.SENSOR))
and (entity_info := platform_info.get(state.key))
and (cast(SensorInfo, entity_info)).force_update
)
):
_LOGGER.debug(
"%s: ignoring duplicate update with and key %s: %s",
"%s: ignoring duplicate update with key %s: %s",
self.name,
key,
state,

View File

@ -15,7 +15,7 @@
"iot_class": "local_push",
"loggers": ["aioesphomeapi", "noiseprotocol"],
"requirements": [
"aioesphomeapi==13.7.2",
"aioesphomeapi==13.7.3",
"bluetooth-data-tools==0.4.0",
"esphome-dashboard-api==1.2.3"
],

View File

@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20230503.2"]
"requirements": ["home-assistant-frontend==20230503.3"]
}

View File

@ -590,7 +590,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
await async_setup_addon_panel(hass, hassio)
# Setup hardware integration for the detected board type
async def _async_setup_hardware_integration(hass):
async def _async_setup_hardware_integration(_: datetime) -> None:
"""Set up hardaware integration for the detected board type."""
if (os_info := get_os_info(hass)) is None:
# os info not yet fetched from supervisor, retry later
@ -610,7 +610,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
)
)
await _async_setup_hardware_integration(hass)
await _async_setup_hardware_integration(datetime.now())
hass.async_create_task(
hass.config_entries.flow.async_init(DOMAIN, context={"source": "system"})

View File

@ -266,7 +266,7 @@ SENSOR_TYPES: tuple[NetatmoSensorEntityDescription, ...] = (
netatmo_name="power",
entity_registry_enabled_default=True,
native_unit_of_measurement=UnitOfPower.WATT,
state_class=SensorStateClass.TOTAL,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.POWER,
),
)

View File

@ -34,7 +34,7 @@ class RebootButton(ONVIFBaseEntity, ButtonEntity):
async def async_press(self) -> None:
"""Send out a SystemReboot command."""
device_mgmt = self.device.device.create_devicemgmt_service()
device_mgmt = await self.device.device.create_devicemgmt_service()
await device_mgmt.SystemReboot()

View File

@ -275,7 +275,7 @@ class OnvifFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
try:
await device.update_xaddrs()
device_mgmt = device.create_devicemgmt_service()
device_mgmt = await device.create_devicemgmt_service()
# Get the MAC address to use as the unique ID for the config flow
if not self.device_id:
try:
@ -314,7 +314,7 @@ class OnvifFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
}
)
# Verify there is an H264 profile
media_service = device.create_media_service()
media_service = await device.create_media_service()
profiles = await media_service.GetProfiles()
except AttributeError: # Likely an empty document or 404 from the wrong port
LOGGER.debug(

View File

@ -136,7 +136,7 @@ class ONVIFDevice:
if self.capabilities.ptz:
LOGGER.debug("%s: creating PTZ service", self.name)
self.device.create_ptz_service()
await self.device.create_ptz_service()
# Determine max resolution from profiles
self.max_resolution = max(
@ -159,7 +159,7 @@ class ONVIFDevice:
async def async_manually_set_date_and_time(self) -> None:
"""Set Date and Time Manually using SetSystemDateAndTime command."""
device_mgmt = self.device.create_devicemgmt_service()
device_mgmt = await self.device.create_devicemgmt_service()
# Retrieve DateTime object from camera to use as template for Set operation
device_time = await device_mgmt.GetSystemDateAndTime()
@ -202,7 +202,7 @@ class ONVIFDevice:
async def async_check_date_and_time(self) -> None:
"""Warns if device and system date not synced."""
LOGGER.debug("%s: Setting up the ONVIF device management service", self.name)
device_mgmt = self.device.create_devicemgmt_service()
device_mgmt = await self.device.create_devicemgmt_service()
system_date = dt_util.utcnow()
LOGGER.debug("%s: Retrieving current device date/time", self.name)
@ -285,7 +285,7 @@ class ONVIFDevice:
async def async_get_device_info(self) -> DeviceInfo:
"""Obtain information about this device."""
device_mgmt = self.device.create_devicemgmt_service()
device_mgmt = await self.device.create_devicemgmt_service()
manufacturer = None
model = None
firmware_version = None
@ -331,7 +331,7 @@ class ONVIFDevice:
"""Obtain information about the available services on the device."""
snapshot = False
with suppress(*GET_CAPABILITIES_EXCEPTIONS):
media_service = self.device.create_media_service()
media_service = await self.device.create_media_service()
media_capabilities = await media_service.GetServiceCapabilities()
snapshot = media_capabilities and media_capabilities.SnapshotUri
@ -342,7 +342,7 @@ class ONVIFDevice:
imaging = False
with suppress(*GET_CAPABILITIES_EXCEPTIONS):
self.device.create_imaging_service()
await self.device.create_imaging_service()
imaging = True
return Capabilities(snapshot=snapshot, ptz=ptz, imaging=imaging)
@ -361,7 +361,7 @@ class ONVIFDevice:
async def async_get_profiles(self) -> list[Profile]:
"""Obtain media profiles for this device."""
media_service = self.device.create_media_service()
media_service = await self.device.create_media_service()
LOGGER.debug("%s: xaddr for media_service: %s", self.name, media_service.xaddr)
try:
result = await media_service.GetProfiles()
@ -408,7 +408,7 @@ class ONVIFDevice:
)
try:
ptz_service = self.device.create_ptz_service()
ptz_service = await self.device.create_ptz_service()
presets = await ptz_service.GetPresets(profile.token)
profile.ptz.presets = [preset.token for preset in presets if preset]
except GET_CAPABILITIES_EXCEPTIONS:
@ -427,7 +427,7 @@ class ONVIFDevice:
async def async_get_stream_uri(self, profile: Profile) -> str:
"""Get the stream URI for a specified profile."""
media_service = self.device.create_media_service()
media_service = await self.device.create_media_service()
req = media_service.create_type("GetStreamUri")
req.ProfileToken = profile.token
req.StreamSetup = {
@ -454,7 +454,7 @@ class ONVIFDevice:
LOGGER.warning("PTZ actions are not supported on device '%s'", self.name)
return
ptz_service = self.device.create_ptz_service()
ptz_service = await self.device.create_ptz_service()
pan_val = distance * PAN_FACTOR.get(pan, 0)
tilt_val = distance * TILT_FACTOR.get(tilt, 0)
@ -576,7 +576,7 @@ class ONVIFDevice:
LOGGER.warning("PTZ actions are not supported on device '%s'", self.name)
return
ptz_service = self.device.create_ptz_service()
ptz_service = await self.device.create_ptz_service()
LOGGER.debug(
"Running Aux Command | Cmd = %s",
@ -607,7 +607,7 @@ class ONVIFDevice:
)
return
imaging_service = self.device.create_imaging_service()
imaging_service = await self.device.create_imaging_service()
LOGGER.debug("Setting Imaging Setting | Settings = %s", settings)
try:

View File

@ -9,7 +9,7 @@ import datetime as dt
from aiohttp.web import Request
from httpx import RemoteProtocolError, RequestError, TransportError
from onvif import ONVIFCamera, ONVIFService
from onvif.client import NotificationManager
from onvif.client import NotificationManager, retry_connection_error
from onvif.exceptions import ONVIFError
from zeep.exceptions import Fault, ValidationError, XMLParseError
@ -40,8 +40,8 @@ SET_SYNCHRONIZATION_POINT_ERRORS = (*SUBSCRIPTION_ERRORS, TypeError)
UNSUBSCRIBE_ERRORS = (XMLParseError, *SUBSCRIPTION_ERRORS)
RENEW_ERRORS = (ONVIFError, RequestError, XMLParseError, *SUBSCRIPTION_ERRORS)
#
# We only keep the subscription alive for 3 minutes, and will keep
# renewing it every 1.5 minutes. This is to avoid the camera
# We only keep the subscription alive for 10 minutes, and will keep
# renewing it every 8 minutes. This is to avoid the camera
# accumulating subscriptions which will be impossible to clean up
# since ONVIF does not provide a way to list existing subscriptions.
#
@ -49,12 +49,25 @@ RENEW_ERRORS = (ONVIFError, RequestError, XMLParseError, *SUBSCRIPTION_ERRORS)
# sending events to us, and we will not be able to recover until
# the subscriptions expire or the camera is rebooted.
#
SUBSCRIPTION_TIME = dt.timedelta(minutes=3)
SUBSCRIPTION_RELATIVE_TIME = (
"PT3M" # use relative time since the time on the camera is not reliable
)
SUBSCRIPTION_RENEW_INTERVAL = SUBSCRIPTION_TIME.total_seconds() / 2
SUBSCRIPTION_RENEW_INTERVAL_ON_ERROR = 60.0
SUBSCRIPTION_TIME = dt.timedelta(minutes=10)
# SUBSCRIPTION_RELATIVE_TIME uses a relative time since the time on the camera
# is not reliable. We use 600 seconds (10 minutes) since some cameras cannot
# parse time in the format "PT10M" (10 minutes).
SUBSCRIPTION_RELATIVE_TIME = "PT600S"
# SUBSCRIPTION_RENEW_INTERVAL must be less than the
# overall timeout of 90 * SUBSCRIPTION_ATTEMPTS = 90 * 2 = 180 seconds
#
# We use 8 minutes between renewals to make sure we never hit the
# 10 minute limit even if the first renewal attempt fails
SUBSCRIPTION_RENEW_INTERVAL = 8 * 60
# The number of attempts to make when creating or renewing a subscription
SUBSCRIPTION_ATTEMPTS = 2
# The time to wait before trying to restart the subscription if it fails
SUBSCRIPTION_RESTART_INTERVAL_ON_ERROR = 60
PULLPOINT_POLL_TIME = dt.timedelta(seconds=60)
PULLPOINT_MESSAGE_LIMIT = 100
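# A quick sanity check of the renewal budget described in the comments above,
# using the values from this diff (copied here rather than imported): a 600 s
# (PT600S) subscription renewed every 8 minutes leaves a 120 s window in which
# a failed first renewal attempt can be retried before the subscription expires.
ASSUMED_SUBSCRIPTION_LIFETIME = 600  # seconds requested via SUBSCRIPTION_RELATIVE_TIME
ASSUMED_RENEW_INTERVAL = 8 * 60      # seconds, SUBSCRIPTION_RENEW_INTERVAL
assert ASSUMED_SUBSCRIPTION_LIFETIME - ASSUMED_RENEW_INTERVAL == 120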
@ -276,7 +289,13 @@ class PullPointManager:
"""Pause pullpoint subscription."""
LOGGER.debug("%s: Pausing PullPoint manager", self._name)
self.state = PullPointManagerState.PAUSED
self._hass.async_create_task(self._async_cancel_and_unsubscribe())
# Cancel the renew job so we don't renew the subscription
# and stop pulling messages.
self._async_cancel_pullpoint_renew()
self.async_cancel_pull_messages()
# We do not unsubscribe from the pullpoint subscription and instead
# let the subscription expire since some cameras will terminate all
# subscriptions if we unsubscribe which will break the webhook.
@callback
def async_resume(self) -> None:
@ -327,20 +346,7 @@ class PullPointManager:
async def _async_start_pullpoint(self) -> bool:
"""Start pullpoint subscription."""
try:
try:
started = await self._async_create_pullpoint_subscription()
except RequestError:
#
# We should only need to retry on RemoteProtocolError but some cameras
# are flaky and sometimes do not respond to the Renew request so we
# retry on RequestError as well.
#
# For RemoteProtocolError:
# http://datatracker.ietf.org/doc/html/rfc2616#section-8.1.4 allows the server
# to close the connection at any time, we treat this as a normal and try again
# once since we do not want to declare the camera as not supporting PullPoint
# if it just happened to close the connection at the wrong time.
started = await self._async_create_pullpoint_subscription()
started = await self._async_create_pullpoint_subscription()
except CREATE_ERRORS as err:
LOGGER.debug(
"%s: Device does not support PullPoint service or has too many subscriptions: %s",
@ -372,16 +378,16 @@ class PullPointManager:
# scheduled when the current one is done if needed.
return
async with self._renew_lock:
next_attempt = SUBSCRIPTION_RENEW_INTERVAL_ON_ERROR
next_attempt = SUBSCRIPTION_RESTART_INTERVAL_ON_ERROR
try:
if (
await self._async_renew_pullpoint()
or await self._async_restart_pullpoint()
):
if await self._async_renew_pullpoint():
next_attempt = SUBSCRIPTION_RENEW_INTERVAL
else:
await self._async_restart_pullpoint()
finally:
self.async_schedule_pullpoint_renew(next_attempt)
@retry_connection_error(SUBSCRIPTION_ATTEMPTS)
async def _async_create_pullpoint_subscription(self) -> bool:
"""Create pullpoint subscription."""
@ -392,12 +398,12 @@ class PullPointManager:
return False
# Create subscription manager
self._pullpoint_subscription = self._device.create_subscription_service(
self._pullpoint_subscription = await self._device.create_subscription_service(
"PullPointSubscription"
)
# Create the service that will be used to pull messages from the device.
self._pullpoint_service = self._device.create_pullpoint_service()
self._pullpoint_service = await self._device.create_pullpoint_service()
# Initialize events
with suppress(*SET_SYNCHRONIZATION_POINT_ERRORS):
@ -447,6 +453,11 @@ class PullPointManager:
)
self._pullpoint_subscription = None
@retry_connection_error(SUBSCRIPTION_ATTEMPTS)
async def _async_call_pullpoint_subscription_renew(self) -> None:
"""Call PullPoint subscription Renew."""
await self._pullpoint_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
async def _async_renew_pullpoint(self) -> bool:
"""Renew the PullPoint subscription."""
if (
@ -458,20 +469,7 @@ class PullPointManager:
# The first time we renew, we may get a Fault error so we
# suppress it. The subscription will be restarted in
# async_restart later.
try:
await self._pullpoint_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
except RequestError:
#
# We should only need to retry on RemoteProtocolError but some cameras
# are flaky and sometimes do not respond to the Renew request so we
# retry on RequestError as well.
#
# For RemoteProtocolError:
# http://datatracker.ietf.org/doc/html/rfc2616#section-8.1.4 allows the server
# to close the connection at any time, we treat this as a normal and try again
# once since we do not want to mark events as stale
# if it just happened to close the connection at the wrong time.
await self._pullpoint_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
await self._async_call_pullpoint_subscription_renew()
LOGGER.debug("%s: Renewed PullPoint subscription", self._name)
return True
except RENEW_ERRORS as err:
@ -521,7 +519,7 @@ class PullPointManager:
stringify_onvif_error(err),
)
return True
except (XMLParseError, *SUBSCRIPTION_ERRORS) as err:
except Fault as err:
# Device may not support subscriptions so log at debug level
# when we get an XMLParseError
LOGGER.debug(
@ -532,6 +530,16 @@ class PullPointManager:
# Treat errors as if the camera restarted. Assume that the pullpoint
# subscription is no longer valid.
return False
except (XMLParseError, RequestError, TimeoutError, TransportError) as err:
LOGGER.debug(
"%s: PullPoint subscription encountered an unexpected error and will be retried "
"(this is normal for some cameras): %s",
self._name,
stringify_onvif_error(err),
)
# Avoid renewing the subscription too often since it causes problems
# for some cameras, mainly the Tapo ones.
return True
if self.state != PullPointManagerState.STARTED:
# If the webhook became started working during the long poll,
@ -655,6 +663,7 @@ class WebHookManager:
self._renew_or_restart_job,
)
@retry_connection_error(SUBSCRIPTION_ATTEMPTS)
async def _async_create_webhook_subscription(self) -> None:
"""Create webhook subscription."""
LOGGER.debug(
@ -689,20 +698,7 @@ class WebHookManager:
async def _async_start_webhook(self) -> bool:
"""Start webhook."""
try:
try:
await self._async_create_webhook_subscription()
except RequestError:
#
# We should only need to retry on RemoteProtocolError but some cameras
# are flaky and sometimes do not respond to the Renew request so we
# retry on RequestError as well.
#
# For RemoteProtocolError:
# http://datatracker.ietf.org/doc/html/rfc2616#section-8.1.4 allows the server
# to close the connection at any time, we treat this as a normal and try again
# once since we do not want to declare the camera as not supporting webhooks
# if it just happened to close the connection at the wrong time.
await self._async_create_webhook_subscription()
await self._async_create_webhook_subscription()
except CREATE_ERRORS as err:
self._event_manager.async_webhook_failed()
LOGGER.debug(
@ -720,6 +716,12 @@ class WebHookManager:
await self._async_unsubscribe_webhook()
return await self._async_start_webhook()
@retry_connection_error(SUBSCRIPTION_ATTEMPTS)
async def _async_call_webhook_subscription_renew(self) -> None:
"""Call PullPoint subscription Renew."""
assert self._webhook_subscription is not None
await self._webhook_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
async def _async_renew_webhook(self) -> bool:
"""Renew webhook subscription."""
if (
@ -728,20 +730,7 @@ class WebHookManager:
):
return False
try:
try:
await self._webhook_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
except RequestError:
#
# We should only need to retry on RemoteProtocolError but some cameras
# are flaky and sometimes do not respond to the Renew request so we
# retry on RequestError as well.
#
# For RemoteProtocolError:
# http://datatracker.ietf.org/doc/html/rfc2616#section-8.1.4 allows the server
# to close the connection at any time, we treat this as a normal and try again
# once since we do not want to mark events as stale
# if it just happened to close the connection at the wrong time.
await self._webhook_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
await self._async_call_webhook_subscription_renew()
LOGGER.debug("%s: Renewed Webhook subscription", self._name)
return True
except RENEW_ERRORS as err:
@ -765,13 +754,12 @@ class WebHookManager:
# scheduled when the current one is done if needed.
return
async with self._renew_lock:
next_attempt = SUBSCRIPTION_RENEW_INTERVAL_ON_ERROR
next_attempt = SUBSCRIPTION_RESTART_INTERVAL_ON_ERROR
try:
if (
await self._async_renew_webhook()
or await self._async_restart_webhook()
):
if await self._async_renew_webhook():
next_attempt = SUBSCRIPTION_RENEW_INTERVAL
else:
await self._async_restart_webhook()
finally:
self._async_schedule_webhook_renew(next_attempt)

View File

@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/onvif",
"iot_class": "local_push",
"loggers": ["onvif", "wsdiscovery", "zeep"],
"requirements": ["onvif-zeep-async==1.3.1", "WSDiscovery==2.0.0"]
"requirements": ["onvif-zeep-async==2.1.1", "WSDiscovery==2.0.0"]
}

View File

@ -15,6 +15,19 @@ PARSERS: Registry[
str, Callable[[str, Any], Coroutine[Any, Any, Event | None]]
] = Registry()
VIDEO_SOURCE_MAPPING = {
"vsconf": "VideoSourceToken",
}
def _normalize_video_source(source: str) -> str:
"""Normalize video source.
Some cameras do not set the VideoSourceToken correctly, which results in
duplicate sensors, so we normalize it to the correct value.
"""
return VIDEO_SOURCE_MAPPING.get(source, source)
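# Usage sketch for the helper above: the placeholder token "vsconf" that some
# cameras report is folded into the canonical "VideoSourceToken" from
# VIDEO_SOURCE_MAPPING, while any other token (the second value below is just
# an arbitrary example) passes through unchanged, so one physical source no
# longer yields duplicate sensors.
assert _normalize_video_source("vsconf") == "VideoSourceToken"
assert _normalize_video_source("VideoSource_1") == "VideoSource_1"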
def local_datetime_or_none(value: str) -> datetime.datetime | None:
"""Convert strings to datetimes, if invalid, return None."""
@ -188,7 +201,7 @@ async def async_parse_field_detector(uid: str, msg) -> Event | None:
rule = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "VideoSourceConfigurationToken":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
if source.Name == "VideoAnalyticsConfigurationToken":
video_analytics = source.Value
if source.Name == "Rule":
@ -220,7 +233,7 @@ async def async_parse_cell_motion_detector(uid: str, msg) -> Event | None:
rule = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "VideoSourceConfigurationToken":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
if source.Name == "VideoAnalyticsConfigurationToken":
video_analytics = source.Value
if source.Name == "Rule":
@ -251,7 +264,7 @@ async def async_parse_motion_region_detector(uid: str, msg) -> Event | None:
rule = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "VideoSourceConfigurationToken":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
if source.Name == "VideoAnalyticsConfigurationToken":
video_analytics = source.Value
if source.Name == "Rule":
@ -282,7 +295,7 @@ async def async_parse_tamper_detector(uid: str, msg) -> Event | None:
rule = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "VideoSourceConfigurationToken":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
if source.Name == "VideoAnalyticsConfigurationToken":
video_analytics = source.Value
if source.Name == "Rule":
@ -312,7 +325,7 @@ async def async_parse_dog_cat_detector(uid: str, msg) -> Event | None:
video_source = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "Source":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
return Event(
f"{uid}_{msg.Topic._value_1}_{video_source}",
@ -337,7 +350,7 @@ async def async_parse_vehicle_detector(uid: str, msg) -> Event | None:
video_source = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "Source":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
return Event(
f"{uid}_{msg.Topic._value_1}_{video_source}",
@ -362,7 +375,7 @@ async def async_parse_person_detector(uid: str, msg) -> Event | None:
video_source = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "Source":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
return Event(
f"{uid}_{msg.Topic._value_1}_{video_source}",
@ -387,7 +400,7 @@ async def async_parse_face_detector(uid: str, msg) -> Event | None:
video_source = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "Source":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
return Event(
f"{uid}_{msg.Topic._value_1}_{video_source}",
@ -401,6 +414,31 @@ async def async_parse_face_detector(uid: str, msg) -> Event | None:
return None
@PARSERS.register("tns1:RuleEngine/MyRuleDetector/Visitor")
# pylint: disable=protected-access
async def async_parse_visitor_detector(uid: str, msg) -> Event | None:
"""Handle parsing event message.
Topic: tns1:RuleEngine/MyRuleDetector/Visitor
"""
try:
video_source = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "Source":
video_source = _normalize_video_source(source.Value)
return Event(
f"{uid}_{msg.Topic._value_1}_{video_source}",
"Visitor Detection",
"binary_sensor",
"occupancy",
None,
msg.Message._value_1.Data.SimpleItem[0].Value == "true",
)
except (AttributeError, KeyError):
return None
@PARSERS.register("tns1:Device/Trigger/DigitalInput")
# pylint: disable=protected-access
async def async_parse_digital_input(uid: str, msg) -> Event | None:
@ -658,7 +696,7 @@ async def async_parse_count_aggregation_counter(uid: str, msg) -> Event | None:
rule = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "VideoSourceConfigurationToken":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
if source.Name == "VideoAnalyticsConfigurationToken":
video_analytics = source.Value
if source.Name == "Rule":

View File

@ -34,7 +34,7 @@ def stringify_onvif_error(error: Exception) -> str:
message += f" (actor:{error.actor})"
else:
message = str(error)
return message or "Device sent empty error"
return message or f"Device sent empty error with type {type(error)}"
def is_auth_error(error: Exception) -> bool:

View File

@ -38,7 +38,8 @@ DEFAULT_ALTITUDE = 0
EVENT_OPENSKY_ENTRY = f"{DOMAIN}_entry"
EVENT_OPENSKY_EXIT = f"{DOMAIN}_exit"
SCAN_INTERVAL = timedelta(seconds=12) # opensky public limit is 10 seconds
# An OpenSky free user has 400 credits per day, at 4 credits per API call:
# 400 / 4 = 100 calls per day, or ~4 calls per hour, hence the 15 minute interval
SCAN_INTERVAL = timedelta(minutes=15)
OPENSKY_API_URL = "https://opensky-network.org/api/states/all"
OPENSKY_API_FIELDS = [

View File

@ -29,7 +29,7 @@ apply:
name: Entities state
description: The entities and the state that they need to be.
required: true
example:
example: |
light.kitchen: "on"
light.ceiling:
state: "on"
@ -60,7 +60,7 @@ create:
entities:
name: Entities state
description: The entities to control with the scene.
example:
example: |
light.tv_back_light: "on"
light.ceiling:
state: "on"
@ -70,7 +70,7 @@ create:
snapshot_entities:
name: Snapshot entities
description: The entities of which a snapshot is to be taken
example:
example: |
- light.ceiling
- light.kitchen
selector:

View File

@ -123,7 +123,7 @@ class SIAAlarmControlPanel(SIABaseEntity, AlarmControlPanelEntity):
"""
new_state = None
if sia_event.code:
new_state = self.entity_description.code_consequences[sia_event.code]
new_state = self.entity_description.code_consequences.get(sia_event.code)
if new_state is None:
return False
_LOGGER.debug("New state will be %s", new_state)

View File

@ -132,7 +132,7 @@ class SIABinarySensor(SIABaseEntity, BinarySensorEntity):
"""
new_state = None
if sia_event.code:
new_state = self.entity_description.code_consequences[sia_event.code]
new_state = self.entity_description.code_consequences.get(sia_event.code)
if new_state is None:
return False
_LOGGER.debug("New state will be %s", new_state)

View File

@ -8,5 +8,5 @@
"iot_class": "local_push",
"loggers": ["hatasmota"],
"mqtt": ["tasmota/discovery/#"],
"requirements": ["hatasmota==0.6.4"]
"requirements": ["hatasmota==0.6.5"]
}

View File

@ -7,7 +7,11 @@ import logging
from typing import Any
import transmission_rpc
from transmission_rpc.error import TransmissionError
from transmission_rpc.error import (
TransmissionAuthError,
TransmissionConnectError,
TransmissionError,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
@ -137,14 +141,13 @@ async def get_api(hass, entry):
_LOGGER.debug("Successfully connected to %s", host)
return api
except TransmissionAuthError as error:
_LOGGER.error("Credentials for Transmission client are not valid")
raise AuthenticationError from error
except TransmissionConnectError as error:
_LOGGER.error("Connecting to the Transmission client %s failed", host)
raise CannotConnect from error
except TransmissionError as error:
if "401: Unauthorized" in str(error):
_LOGGER.error("Credentials for Transmission client are not valid")
raise AuthenticationError from error
if "111: Connection refused" in str(error):
_LOGGER.error("Connecting to the Transmission client %s failed", host)
raise CannotConnect from error
_LOGGER.error(error)
raise UnknownError from error

View File

@ -137,7 +137,19 @@ class Endpoint:
):
cluster_handler_class = MultistateInput
# end of ugly hack
cluster_handler = cluster_handler_class(cluster, self)
try:
cluster_handler = cluster_handler_class(cluster, self)
except KeyError as err:
_LOGGER.warning(
"Cluster handler %s for cluster %s on endpoint %s is invalid: %s",
cluster_handler_class,
cluster,
self,
err,
)
continue
if cluster_handler.name == const.CLUSTER_HANDLER_POWER_CONFIGURATION:
self._device.power_configuration_ch = cluster_handler
elif cluster_handler.name == const.CLUSTER_HANDLER_IDENTIFY:

View File

@ -8,7 +8,7 @@ from .backports.enum import StrEnum
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2023
MINOR_VERSION: Final = 5
PATCH_VERSION: Final = "1"
PATCH_VERSION: Final = "2"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)

View File

@ -25,7 +25,7 @@ ha-av==10.0.0
hass-nabucasa==0.66.2
hassil==1.0.6
home-assistant-bluetooth==1.10.0
home-assistant-frontend==20230503.2
home-assistant-frontend==20230503.3
home-assistant-intents==2023.4.26
httpx==0.24.0
ifaddr==0.1.7

View File

@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2023.5.1"
version = "2023.5.2"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"

View File

@ -156,7 +156,7 @@ aioecowitt==2023.01.0
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==13.7.2
aioesphomeapi==13.7.3
# homeassistant.components.flo
aioflo==2021.11.0
@ -644,7 +644,7 @@ elgato==4.0.1
eliqonline==1.2.2
# homeassistant.components.elkm1
elkm1-lib==2.2.1
elkm1-lib==2.2.2
# homeassistant.components.elmax
elmax_api==0.0.4
@ -881,7 +881,7 @@ hass_splunk==0.1.1
hassil==1.0.6
# homeassistant.components.tasmota
hatasmota==0.6.4
hatasmota==0.6.5
# homeassistant.components.jewish_calendar
hdate==0.10.4
@ -911,7 +911,7 @@ hole==0.8.0
holidays==0.21.13
# homeassistant.components.frontend
home-assistant-frontend==20230503.2
home-assistant-frontend==20230503.3
# homeassistant.components.conversation
home-assistant-intents==2023.4.26
@ -1264,7 +1264,7 @@ ondilo==0.2.0
onkyo-eiscp==1.2.7
# homeassistant.components.onvif
onvif-zeep-async==1.3.1
onvif-zeep-async==2.1.1
# homeassistant.components.opengarage
open-garage==0.2.0

View File

@ -146,7 +146,7 @@ aioecowitt==2023.01.0
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==13.7.2
aioesphomeapi==13.7.3
# homeassistant.components.flo
aioflo==2021.11.0
@ -506,7 +506,7 @@ easyenergy==0.3.0
elgato==4.0.1
# homeassistant.components.elkm1
elkm1-lib==2.2.1
elkm1-lib==2.2.2
# homeassistant.components.elmax
elmax_api==0.0.4
@ -679,7 +679,7 @@ hass-nabucasa==0.66.2
hassil==1.0.6
# homeassistant.components.tasmota
hatasmota==0.6.4
hatasmota==0.6.5
# homeassistant.components.jewish_calendar
hdate==0.10.4
@ -700,7 +700,7 @@ hole==0.8.0
holidays==0.21.13
# homeassistant.components.frontend
home-assistant-frontend==20230503.2
home-assistant-frontend==20230503.3
# homeassistant.components.conversation
home-assistant-intents==2023.4.26
@ -945,7 +945,7 @@ omnilogic==0.4.5
ondilo==0.2.0
# homeassistant.components.onvif
onvif-zeep-async==1.3.1
onvif-zeep-async==2.1.1
# homeassistant.components.opengarage
open-garage==0.2.0

View File

@ -542,11 +542,13 @@ async def test_alexa_handle_logout(
assert len(mock_enable.return_value.mock_calls) == 1
@pytest.mark.parametrize("alexa_settings_version", [1, 2])
async def test_alexa_config_migrate_expose_entity_prefs(
hass: HomeAssistant,
cloud_prefs: CloudPreferences,
cloud_stub,
entity_registry: er.EntityRegistry,
alexa_settings_version: int,
) -> None:
"""Test migrating Alexa entity config."""
hass.state = CoreState.starting
@ -593,7 +595,7 @@ async def test_alexa_config_migrate_expose_entity_prefs(
await cloud_prefs.async_update(
alexa_enabled=True,
alexa_report_state=False,
alexa_settings_version=1,
alexa_settings_version=alexa_settings_version,
)
expose_entity(hass, entity_migrated.entity_id, False)
@ -641,6 +643,100 @@ async def test_alexa_config_migrate_expose_entity_prefs(
}
async def test_alexa_config_migrate_expose_entity_prefs_v2_no_exposed(
hass: HomeAssistant,
cloud_prefs: CloudPreferences,
entity_registry: er.EntityRegistry,
) -> None:
"""Test migrating Alexa entity config from v2 to v3 when no entity is exposed."""
hass.state = CoreState.starting
assert await async_setup_component(hass, "homeassistant", {})
hass.states.async_set("light.state_only", "on")
entity_migrated = entity_registry.async_get_or_create(
"light",
"test",
"light_migrated",
suggested_object_id="migrated",
)
await cloud_prefs.async_update(
alexa_enabled=True,
alexa_report_state=False,
alexa_settings_version=2,
)
expose_entity(hass, "light.state_only", False)
expose_entity(hass, entity_migrated.entity_id, False)
cloud_prefs._prefs[PREF_ALEXA_ENTITY_CONFIGS]["light.state_only"] = {
PREF_SHOULD_EXPOSE: True
}
cloud_prefs._prefs[PREF_ALEXA_ENTITY_CONFIGS][entity_migrated.entity_id] = {
PREF_SHOULD_EXPOSE: True
}
conf = alexa_config.CloudAlexaConfig(
hass, ALEXA_SCHEMA({}), "mock-user-id", cloud_prefs, Mock(is_logged_in=False)
)
await conf.async_initialize()
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert async_get_entity_settings(hass, "light.state_only") == {
"cloud.alexa": {"should_expose": True}
}
assert async_get_entity_settings(hass, entity_migrated.entity_id) == {
"cloud.alexa": {"should_expose": True}
}
async def test_alexa_config_migrate_expose_entity_prefs_v2_exposed(
hass: HomeAssistant,
cloud_prefs: CloudPreferences,
entity_registry: er.EntityRegistry,
) -> None:
"""Test migrating Alexa entity config from v2 to v3 when an entity is exposed."""
hass.state = CoreState.starting
assert await async_setup_component(hass, "homeassistant", {})
hass.states.async_set("light.state_only", "on")
entity_migrated = entity_registry.async_get_or_create(
"light",
"test",
"light_migrated",
suggested_object_id="migrated",
)
await cloud_prefs.async_update(
alexa_enabled=True,
alexa_report_state=False,
alexa_settings_version=2,
)
expose_entity(hass, "light.state_only", False)
expose_entity(hass, entity_migrated.entity_id, True)
cloud_prefs._prefs[PREF_ALEXA_ENTITY_CONFIGS]["light.state_only"] = {
PREF_SHOULD_EXPOSE: True
}
cloud_prefs._prefs[PREF_ALEXA_ENTITY_CONFIGS][entity_migrated.entity_id] = {
PREF_SHOULD_EXPOSE: True
}
conf = alexa_config.CloudAlexaConfig(
hass, ALEXA_SCHEMA({}), "mock-user-id", cloud_prefs, Mock(is_logged_in=False)
)
await conf.async_initialize()
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert async_get_entity_settings(hass, "light.state_only") == {
"cloud.alexa": {"should_expose": False}
}
assert async_get_entity_settings(hass, entity_migrated.entity_id) == {
"cloud.alexa": {"should_expose": True}
}
async def test_alexa_config_migrate_expose_entity_prefs_default_none(
hass: HomeAssistant,
cloud_prefs: CloudPreferences,

View File

@ -483,10 +483,12 @@ async def test_google_handle_logout(
assert len(mock_enable.return_value.mock_calls) == 1
@pytest.mark.parametrize("google_settings_version", [1, 2])
async def test_google_config_migrate_expose_entity_prefs(
hass: HomeAssistant,
cloud_prefs: CloudPreferences,
entity_registry: er.EntityRegistry,
google_settings_version: int,
) -> None:
"""Test migrating Google entity config."""
hass.state = CoreState.starting
@ -540,7 +542,7 @@ async def test_google_config_migrate_expose_entity_prefs(
await cloud_prefs.async_update(
google_enabled=True,
google_report_state=False,
google_settings_version=1,
google_settings_version=google_settings_version,
)
expose_entity(hass, entity_migrated.entity_id, False)
@ -596,6 +598,100 @@ async def test_google_config_migrate_expose_entity_prefs(
}
async def test_google_config_migrate_expose_entity_prefs_v2_no_exposed(
hass: HomeAssistant,
cloud_prefs: CloudPreferences,
entity_registry: er.EntityRegistry,
) -> None:
"""Test migrating Google entity config from v2 to v3 when no entity is exposed."""
hass.state = CoreState.starting
assert await async_setup_component(hass, "homeassistant", {})
hass.states.async_set("light.state_only", "on")
entity_migrated = entity_registry.async_get_or_create(
"light",
"test",
"light_migrated",
suggested_object_id="migrated",
)
await cloud_prefs.async_update(
google_enabled=True,
google_report_state=False,
google_settings_version=2,
)
expose_entity(hass, "light.state_only", False)
expose_entity(hass, entity_migrated.entity_id, False)
cloud_prefs._prefs[PREF_GOOGLE_ENTITY_CONFIGS]["light.state_only"] = {
PREF_SHOULD_EXPOSE: True
}
cloud_prefs._prefs[PREF_GOOGLE_ENTITY_CONFIGS][entity_migrated.entity_id] = {
PREF_SHOULD_EXPOSE: True
}
conf = CloudGoogleConfig(
hass, GACTIONS_SCHEMA({}), "mock-user-id", cloud_prefs, Mock(is_logged_in=False)
)
await conf.async_initialize()
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert async_get_entity_settings(hass, "light.state_only") == {
"cloud.google_assistant": {"should_expose": True}
}
assert async_get_entity_settings(hass, entity_migrated.entity_id) == {
"cloud.google_assistant": {"should_expose": True}
}
async def test_google_config_migrate_expose_entity_prefs_v2_exposed(
hass: HomeAssistant,
cloud_prefs: CloudPreferences,
entity_registry: er.EntityRegistry,
) -> None:
"""Test migrating Google entity config from v2 to v3 when an entity is exposed."""
hass.state = CoreState.starting
assert await async_setup_component(hass, "homeassistant", {})
hass.states.async_set("light.state_only", "on")
entity_migrated = entity_registry.async_get_or_create(
"light",
"test",
"light_migrated",
suggested_object_id="migrated",
)
await cloud_prefs.async_update(
google_enabled=True,
google_report_state=False,
google_settings_version=2,
)
expose_entity(hass, "light.state_only", False)
expose_entity(hass, entity_migrated.entity_id, True)
cloud_prefs._prefs[PREF_GOOGLE_ENTITY_CONFIGS]["light.state_only"] = {
PREF_SHOULD_EXPOSE: True
}
cloud_prefs._prefs[PREF_GOOGLE_ENTITY_CONFIGS][entity_migrated.entity_id] = {
PREF_SHOULD_EXPOSE: True
}
conf = CloudGoogleConfig(
hass, GACTIONS_SCHEMA({}), "mock-user-id", cloud_prefs, Mock(is_logged_in=False)
)
await conf.async_initialize()
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert async_get_entity_settings(hass, "light.state_only") == {
"cloud.google_assistant": {"should_expose": False}
}
assert async_get_entity_settings(hass, entity_migrated.entity_id) == {
"cloud.google_assistant": {"should_expose": True}
}
async def test_google_config_migrate_expose_entity_prefs_default_none(
hass: HomeAssistant,
cloud_prefs: CloudPreferences,

View File

@ -98,8 +98,8 @@ def setup_mock_onvif_camera(
)
else:
mock_onvif_camera.update_xaddrs = AsyncMock(return_value=True)
mock_onvif_camera.create_devicemgmt_service = MagicMock(return_value=devicemgmt)
mock_onvif_camera.create_media_service = MagicMock(return_value=media_service)
mock_onvif_camera.create_devicemgmt_service = AsyncMock(return_value=devicemgmt)
mock_onvif_camera.create_media_service = AsyncMock(return_value=media_service)
mock_onvif_camera.close = AsyncMock(return_value=None)
def mock_constructor(

View File

@ -27,7 +27,7 @@ async def test_reboot_button(hass: HomeAssistant) -> None:
async def test_reboot_button_press(hass: HomeAssistant) -> None:
"""Test Reboot button press."""
_, camera, _ = await setup_onvif_integration(hass)
devicemgmt = camera.create_devicemgmt_service()
devicemgmt = await camera.create_devicemgmt_service()
devicemgmt.SystemReboot = AsyncMock(return_value=True)
await hass.services.async_call(

View File

@ -102,7 +102,7 @@ INDEXED_SENSOR_CONFIG_2 = {
}
NESTED_SENSOR_CONFIG = {
NESTED_SENSOR_CONFIG_1 = {
"sn": {
"Time": "2020-03-03T00:00:00+00:00",
"TX23": {
@ -119,6 +119,17 @@ NESTED_SENSOR_CONFIG = {
}
}
NESTED_SENSOR_CONFIG_2 = {
"sn": {
"Time": "2023-01-27T11:04:56",
"DS18B20": {
"Id": "01191ED79190",
"Temperature": 2.4,
},
"TempUnit": "C",
}
}
async def test_controlling_state_via_mqtt(
hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_tasmota
@ -174,12 +185,59 @@ async def test_controlling_state_via_mqtt(
assert state.state == "20.0"
@pytest.mark.parametrize(
("sensor_config", "entity_ids", "messages", "states"),
[
(
NESTED_SENSOR_CONFIG_1,
["sensor.tasmota_tx23_speed_act", "sensor.tasmota_tx23_dir_card"],
(
'{"TX23":{"Speed":{"Act":"12.3"},"Dir": {"Card": "WSW"}}}',
'{"StatusSNS":{"TX23":{"Speed":{"Act":"23.4"},"Dir": {"Card": "ESE"}}}}',
),
(
{
"sensor.tasmota_tx23_speed_act": "12.3",
"sensor.tasmota_tx23_dir_card": "WSW",
},
{
"sensor.tasmota_tx23_speed_act": "23.4",
"sensor.tasmota_tx23_dir_card": "ESE",
},
),
),
(
NESTED_SENSOR_CONFIG_2,
["sensor.tasmota_ds18b20_temperature", "sensor.tasmota_ds18b20_id"],
(
'{"DS18B20":{"Id": "01191ED79190","Temperature": 12.3}}',
'{"StatusSNS":{"DS18B20":{"Id": "meep","Temperature": 23.4}}}',
),
(
{
"sensor.tasmota_ds18b20_temperature": "12.3",
"sensor.tasmota_ds18b20_id": "01191ED79190",
},
{
"sensor.tasmota_ds18b20_temperature": "23.4",
"sensor.tasmota_ds18b20_id": "meep",
},
),
),
],
)
async def test_nested_sensor_state_via_mqtt(
hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_tasmota
hass: HomeAssistant,
mqtt_mock: MqttMockHAClient,
setup_tasmota,
sensor_config,
entity_ids,
messages,
states,
) -> None:
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(NESTED_SENSOR_CONFIG)
sensor_config = copy.deepcopy(sensor_config)
mac = config["mac"]
async_fire_mqtt_message(
@ -195,31 +253,29 @@ async def test_nested_sensor_state_via_mqtt(
)
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_tx23_speed_act")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
for entity_id in entity_ids:
state = hass.states.get(entity_id)
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_tx23_speed_act")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
for entity_id in entity_ids:
state = hass.states.get(entity_id)
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
# Test periodic state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/SENSOR", '{"TX23":{"Speed":{"Act":"12.3"}}}'
)
state = hass.states.get("sensor.tasmota_tx23_speed_act")
assert state.state == "12.3"
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", messages[0])
for entity_id in entity_ids:
state = hass.states.get(entity_id)
assert state.state == states[0][entity_id]
# Test polled state update
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/stat/STATUS10",
'{"StatusSNS":{"TX23":{"Speed":{"Act":"23.4"}}}}',
)
state = hass.states.get("sensor.tasmota_tx23_speed_act")
assert state.state == "23.4"
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/STATUS10", messages[1])
for entity_id in entity_ids:
state = hass.states.get(entity_id)
assert state.state == states[1][entity_id]
async def test_indexed_sensor_state_via_mqtt(
@ -728,7 +784,7 @@ async def test_nested_sensor_attributes(
) -> None:
"""Test correct attributes for sensors."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(NESTED_SENSOR_CONFIG)
sensor_config = copy.deepcopy(NESTED_SENSOR_CONFIG_1)
mac = config["mac"]
async_fire_mqtt_message(
@ -754,7 +810,7 @@ async def test_nested_sensor_attributes(
assert state.attributes.get("device_class") is None
assert state.attributes.get("friendly_name") == "Tasmota TX23 Dir Avg"
assert state.attributes.get("icon") is None
assert state.attributes.get("unit_of_measurement") == " "
assert state.attributes.get("unit_of_measurement") is None
async def test_indexed_sensor_attributes(

View File

@ -2,7 +2,11 @@
from unittest.mock import MagicMock, patch
import pytest
from transmission_rpc.error import TransmissionError
from transmission_rpc.error import (
TransmissionAuthError,
TransmissionConnectError,
TransmissionError,
)
from homeassistant import config_entries
from homeassistant.components import transmission
@ -125,7 +129,7 @@ async def test_error_on_wrong_credentials(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
mock_api.side_effect = TransmissionError("401: Unauthorized")
mock_api.side_effect = TransmissionAuthError()
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_CONFIG_DATA,
@ -137,6 +141,21 @@ async def test_error_on_wrong_credentials(
}
async def test_unexpected_error(hass: HomeAssistant, mock_api: MagicMock) -> None:
"""Test we handle unexpected error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
mock_api.side_effect = TransmissionError()
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_CONFIG_DATA,
)
assert result2["type"] == FlowResultType.FORM
assert result2["errors"] == {"base": "cannot_connect"}
async def test_error_on_connection_failure(
hass: HomeAssistant, mock_api: MagicMock
) -> None:
@ -145,7 +164,7 @@ async def test_error_on_connection_failure(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
mock_api.side_effect = TransmissionError("111: Connection refused")
mock_api.side_effect = TransmissionConnectError()
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_CONFIG_DATA,
@ -213,7 +232,7 @@ async def test_reauth_failed(hass: HomeAssistant, mock_api: MagicMock) -> None:
assert result["step_id"] == "reauth_confirm"
assert result["description_placeholders"] == {"username": "user"}
mock_api.side_effect = TransmissionError("401: Unauthorized")
mock_api.side_effect = TransmissionAuthError()
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
@ -248,7 +267,7 @@ async def test_reauth_failed_connection_error(
assert result["step_id"] == "reauth_confirm"
assert result["description_placeholders"] == {"username": "user"}
mock_api.side_effect = TransmissionError("111: Connection refused")
mock_api.side_effect = TransmissionConnectError()
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{

View File

@ -3,7 +3,11 @@
from unittest.mock import MagicMock, patch
import pytest
from transmission_rpc.error import TransmissionError
from transmission_rpc.error import (
TransmissionAuthError,
TransmissionConnectError,
TransmissionError,
)
from homeassistant.components.transmission.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
@ -40,7 +44,7 @@ async def test_setup_failed_connection_error(
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG_DATA)
entry.add_to_hass(hass)
mock_api.side_effect = TransmissionError("111: Connection refused")
mock_api.side_effect = TransmissionConnectError()
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state == ConfigEntryState.SETUP_RETRY
@ -54,7 +58,21 @@ async def test_setup_failed_auth_error(
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG_DATA)
entry.add_to_hass(hass)
mock_api.side_effect = TransmissionError("401: Unauthorized")
mock_api.side_effect = TransmissionAuthError()
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state == ConfigEntryState.SETUP_ERROR
async def test_setup_failed_unexpected_error(
hass: HomeAssistant, mock_api: MagicMock
) -> None:
"""Test integration failed due to unexpected error."""
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG_DATA)
entry.add_to_hass(hass)
mock_api.side_effect = TransmissionError()
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state == ConfigEntryState.SETUP_ERROR

View File

@ -1,11 +1,13 @@
"""Test ZHA Core cluster handlers."""
import asyncio
from collections.abc import Callable
import logging
import math
from unittest import mock
from unittest.mock import AsyncMock, patch
import pytest
import zigpy.device
import zigpy.endpoint
from zigpy.endpoint import Endpoint as ZigpyEndpoint
import zigpy.profiles.zha
@ -791,3 +793,41 @@ async def test_configure_reporting(hass: HomeAssistant, endpoint) -> None:
}
),
]
async def test_invalid_cluster_handler(hass: HomeAssistant, caplog) -> None:
"""Test setting up a cluster handler that fails to match properly."""
class TestZigbeeClusterHandler(cluster_handlers.ClusterHandler):
REPORT_CONFIG = (
cluster_handlers.AttrReportConfig(attr="missing_attr", config=(1, 60, 1)),
)
mock_device = mock.AsyncMock(spec_set=zigpy.device.Device)
zigpy_ep = zigpy.endpoint.Endpoint(mock_device, endpoint_id=1)
cluster = zigpy_ep.add_input_cluster(zigpy.zcl.clusters.lighting.Color.cluster_id)
cluster.configure_reporting_multiple = AsyncMock(
spec_set=cluster.configure_reporting_multiple,
return_value=[
foundation.ConfigureReportingResponseRecord(
status=foundation.Status.SUCCESS
)
],
)
mock_zha_device = mock.AsyncMock(spec_set=ZHADevice)
zha_endpoint = Endpoint(zigpy_ep, mock_zha_device)
# The cluster handler throws an error when matching this cluster
with pytest.raises(KeyError):
TestZigbeeClusterHandler(cluster, zha_endpoint)
# And one is also logged at runtime
with patch.dict(
registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY,
{cluster.cluster_id: TestZigbeeClusterHandler},
), caplog.at_level(logging.WARNING):
zha_endpoint.add_all_cluster_handlers()
assert "missing_attr" in caplog.text