Franck Nijhof 2023-05-23 23:51:57 +02:00 committed by GitHub
commit 3107d7514c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
82 changed files with 979 additions and 709 deletions

View File

@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"quality_scale": "platinum",
"requirements": ["accuweather==0.5.1"]
"requirements": ["accuweather==0.5.2"]
}

View File

@ -91,6 +91,16 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
_attr_max_temp = 32
_attr_min_temp = 16
_attr_hvac_modes = [
HVACMode.OFF,
HVACMode.COOL,
HVACMode.HEAT,
HVACMode.FAN_ONLY,
HVACMode.DRY,
]
_attr_supported_features = ClimateEntityFeature.FAN_MODE
def __init__(self, instance: AdvantageAirData, ac_key: str) -> None:
"""Initialize an AdvantageAir AC unit."""
super().__init__(instance, ac_key)
@ -98,36 +108,14 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
# Set supported features and HVAC modes based on current operating mode
if self._ac.get(ADVANTAGE_AIR_MYAUTO_ENABLED):
# MyAuto
self._attr_supported_features = (
ClimateEntityFeature.FAN_MODE
| ClimateEntityFeature.TARGET_TEMPERATURE
self._attr_supported_features |= (
ClimateEntityFeature.TARGET_TEMPERATURE
| ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
)
self._attr_hvac_modes = [
HVACMode.OFF,
HVACMode.COOL,
HVACMode.HEAT,
HVACMode.FAN_ONLY,
HVACMode.DRY,
HVACMode.HEAT_COOL,
]
elif self._ac.get(ADVANTAGE_AIR_MYTEMP_ENABLED):
# MyTemp
self._attr_supported_features = ClimateEntityFeature.FAN_MODE
self._attr_hvac_modes = [HVACMode.OFF, HVACMode.COOL, HVACMode.HEAT]
else:
self._attr_hvac_modes += [HVACMode.HEAT_COOL]
elif not self._ac.get(ADVANTAGE_AIR_MYTEMP_ENABLED):
# MyZone
self._attr_supported_features = (
ClimateEntityFeature.FAN_MODE | ClimateEntityFeature.TARGET_TEMPERATURE
)
self._attr_hvac_modes = [
HVACMode.OFF,
HVACMode.COOL,
HVACMode.HEAT,
HVACMode.FAN_ONLY,
HVACMode.DRY,
]
self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE
# Add "ezfan" mode if supported
if self._ac.get(ADVANTAGE_AIR_AUTOFAN):
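Note: as a quick illustration of the climate refactor above, here is a minimal, self-contained sketch of how the constructor now extends the class-level defaults instead of re-declaring features and modes per operating mode. The dictionary keys and the stand-in enums are placeholders, not the real Home Assistant or Advantage Air constants.

from enum import Enum, IntFlag, auto


class Feature(IntFlag):
    """Stand-in for ClimateEntityFeature."""

    FAN_MODE = auto()
    TARGET_TEMPERATURE = auto()
    TARGET_TEMPERATURE_RANGE = auto()


class Mode(Enum):
    """Stand-in for HVACMode."""

    OFF = "off"
    COOL = "cool"
    HEAT = "heat"
    FAN_ONLY = "fan_only"
    DRY = "dry"
    HEAT_COOL = "heat_cool"


def resolve_modes(ac: dict) -> tuple[Feature, list[Mode]]:
    # Class-level defaults shared by all operating modes.
    features = Feature.FAN_MODE
    modes = [Mode.OFF, Mode.COOL, Mode.HEAT, Mode.FAN_ONLY, Mode.DRY]
    if ac.get("myauto_enabled"):  # MyAuto: add target temperatures and HEAT_COOL
        features |= Feature.TARGET_TEMPERATURE | Feature.TARGET_TEMPERATURE_RANGE
        modes += [Mode.HEAT_COOL]
    elif not ac.get("mytemp_enabled"):  # MyZone: single target temperature
        features |= Feature.TARGET_TEMPERATURE
    return features, modes


print(resolve_modes({"myauto_enabled": True}))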

View File

@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/apple_tv",
"iot_class": "local_push",
"loggers": ["pyatv", "srptools"],
"requirements": ["pyatv==0.10.3"],
"requirements": ["pyatv==0.11.0"],
"zeroconf": [
"_mediaremotetv._tcp.local.",
"_companion-link._tcp.local.",

View File

@ -7,6 +7,7 @@ from itertools import chain
import logging
from aiohttp import ClientError, ClientResponseError
from yalexs.const import DEFAULT_BRAND
from yalexs.doorbell import Doorbell, DoorbellDetail
from yalexs.exceptions import AugustApiAIOHTTPError
from yalexs.lock import Lock, LockDetail
@ -16,7 +17,7 @@ from yalexs_ble import YaleXSBLEDiscovery
from homeassistant.config_entries import SOURCE_INTEGRATION_DISCOVERY, ConfigEntry
from homeassistant.const import CONF_PASSWORD
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryNotReady,
@ -25,7 +26,7 @@ from homeassistant.exceptions import (
from homeassistant.helpers import device_registry as dr, discovery_flow
from .activity import ActivityStream
from .const import DOMAIN, MIN_TIME_BETWEEN_DETAIL_UPDATES, PLATFORMS
from .const import CONF_BRAND, DOMAIN, MIN_TIME_BETWEEN_DETAIL_UPDATES, PLATFORMS
from .exceptions import CannotConnect, InvalidAuth, RequireValidation
from .gateway import AugustGateway
from .subscriber import AugustSubscriberMixin
@ -122,19 +123,29 @@ def _async_trigger_ble_lock_discovery(
class AugustData(AugustSubscriberMixin):
"""August data object."""
def __init__(self, hass, config_entry, august_gateway):
def __init__(
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
august_gateway: AugustGateway,
) -> None:
"""Init August data object."""
super().__init__(hass, MIN_TIME_BETWEEN_DETAIL_UPDATES)
self._config_entry = config_entry
self._hass = hass
self._august_gateway = august_gateway
self.activity_stream = None
self.activity_stream: ActivityStream | None = None
self._api = august_gateway.api
self._device_detail_by_id = {}
self._doorbells_by_id = {}
self._locks_by_id = {}
self._house_ids = set()
self._pubnub_unsub = None
self._device_detail_by_id: dict[str, LockDetail | DoorbellDetail] = {}
self._doorbells_by_id: dict[str, Doorbell] = {}
self._locks_by_id: dict[str, Lock] = {}
self._house_ids: set[str] = set()
self._pubnub_unsub: CALLBACK_TYPE | None = None
@property
def brand(self) -> str:
"""Brand of the device."""
return self._config_entry.data.get(CONF_BRAND, DEFAULT_BRAND)
async def async_setup(self):
"""Async setup of august device data and activities."""
@ -185,7 +196,11 @@ class AugustData(AugustSubscriberMixin):
)
await self.activity_stream.async_setup()
pubnub.subscribe(self.async_pubnub_message)
self._pubnub_unsub = async_create_pubnub(user_data["UserID"], pubnub)
self._pubnub_unsub = async_create_pubnub(
user_data["UserID"],
pubnub,
self.brand,
)
if self._locks_by_id:
# Do not prevent setup as the sync can timeout
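Note: a tiny sketch of the new brand property above. The value is read from the config entry data with a fallback for entries created before the field existed; the brand strings and the default used here are placeholders, not the real yalexs constants.

CONF_BRAND = "brand"
DEFAULT_BRAND = "august"  # placeholder for yalexs.const.DEFAULT_BRAND


class EntryDataSketch:
    """Stand-in for the config-entry backed data object."""

    def __init__(self, entry_data: dict[str, str]) -> None:
        self._entry_data = entry_data

    @property
    def brand(self) -> str:
        """Brand of the device, defaulting when an older entry has no brand stored."""
        return self._entry_data.get(CONF_BRAND, DEFAULT_BRAND)


print(EntryDataSketch({}).brand)                        # falls back to the default
print(EntryDataSketch({CONF_BRAND: "yale_home"}).brand)  # explicit brand wins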

View File

@ -50,6 +50,7 @@ def _retrieve_online_state(data: AugustData, detail: DoorbellDetail) -> bool:
def _retrieve_motion_state(data: AugustData, detail: DoorbellDetail) -> bool:
assert data.activity_stream is not None
latest = data.activity_stream.get_latest_device_activity(
detail.device_id, {ActivityType.DOORBELL_MOTION}
)
@ -61,6 +62,7 @@ def _retrieve_motion_state(data: AugustData, detail: DoorbellDetail) -> bool:
def _retrieve_image_capture_state(data: AugustData, detail: DoorbellDetail) -> bool:
assert data.activity_stream is not None
latest = data.activity_stream.get_latest_device_activity(
detail.device_id, {ActivityType.DOORBELL_IMAGE_CAPTURE}
)
@ -72,6 +74,7 @@ def _retrieve_image_capture_state(data: AugustData, detail: DoorbellDetail) -> b
def _retrieve_ding_state(data: AugustData, detail: DoorbellDetail) -> bool:
assert data.activity_stream is not None
latest = data.activity_stream.get_latest_device_activity(
detail.device_id, {ActivityType.DOORBELL_DING}
)
@ -211,6 +214,7 @@ class AugustDoorBinarySensor(AugustEntityMixin, BinarySensorEntity):
@callback
def _update_from_data(self):
"""Get the latest state of the sensor and update activity."""
assert self._data.activity_stream is not None
door_activity = self._data.activity_stream.get_latest_device_activity(
self._device_id, {ActivityType.DOOR_OPERATION}
)

View File

@ -1,33 +1,45 @@
"""Config flow for August integration."""
from collections.abc import Mapping
from dataclasses import dataclass
import logging
from typing import Any
import voluptuous as vol
from yalexs.authenticator import ValidationResult
from yalexs.const import BRANDS, DEFAULT_BRAND
from homeassistant import config_entries
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.data_entry_flow import FlowResult
from .const import CONF_LOGIN_METHOD, DOMAIN, LOGIN_METHODS, VERIFICATION_CODE_KEY
from .const import (
CONF_ACCESS_TOKEN_CACHE_FILE,
CONF_BRAND,
CONF_LOGIN_METHOD,
DEFAULT_LOGIN_METHOD,
DOMAIN,
LOGIN_METHODS,
VERIFICATION_CODE_KEY,
)
from .exceptions import CannotConnect, InvalidAuth, RequireValidation
from .gateway import AugustGateway
_LOGGER = logging.getLogger(__name__)
async def async_validate_input(data, august_gateway):
async def async_validate_input(
data: dict[str, Any], august_gateway: AugustGateway
) -> dict[str, Any]:
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
Request configuration steps from the user.
"""
assert august_gateway.authenticator is not None
authenticator = august_gateway.authenticator
if (code := data.get(VERIFICATION_CODE_KEY)) is not None:
result = await august_gateway.authenticator.async_validate_verification_code(
code
)
result = await authenticator.async_validate_verification_code(code)
_LOGGER.debug("Verification code validation: %s", result)
if result != ValidationResult.VALIDATED:
raise RequireValidation
@ -50,6 +62,16 @@ async def async_validate_input(data, august_gateway):
}
@dataclass
class ValidateResult:
"""Result from validation."""
validation_required: bool
info: dict[str, Any]
errors: dict[str, str]
description_placeholders: dict[str, str]
class AugustConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for August."""
@ -57,9 +79,9 @@ class AugustConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
def __init__(self):
"""Store an AugustGateway()."""
self._august_gateway = None
self._user_auth_details = {}
self._needs_reset = False
self._august_gateway: AugustGateway | None = None
self._user_auth_details: dict[str, Any] = {}
self._needs_reset = True
self._mode = None
super().__init__()
@ -70,19 +92,30 @@ class AugustConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
async def async_step_user_validate(self, user_input=None):
"""Handle authentication."""
errors = {}
errors: dict[str, str] = {}
description_placeholders: dict[str, str] = {}
if user_input is not None:
result = await self._async_auth_or_validate(user_input, errors)
if result is not None:
return result
self._user_auth_details.update(user_input)
validate_result = await self._async_auth_or_validate()
description_placeholders = validate_result.description_placeholders
if validate_result.validation_required:
return await self.async_step_validation()
if not (errors := validate_result.errors):
return await self._async_update_or_create_entry(validate_result.info)
return self.async_show_form(
step_id="user_validate",
data_schema=vol.Schema(
{
vol.Required(
CONF_BRAND,
default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND),
): vol.In(BRANDS),
vol.Required(
CONF_LOGIN_METHOD,
default=self._user_auth_details.get(CONF_LOGIN_METHOD, "phone"),
default=self._user_auth_details.get(
CONF_LOGIN_METHOD, DEFAULT_LOGIN_METHOD
),
): vol.In(LOGIN_METHODS),
vol.Required(
CONF_USERNAME,
@ -92,21 +125,27 @@ class AugustConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
}
),
errors=errors,
description_placeholders=description_placeholders,
)
async def async_step_validation(self, user_input=None):
async def async_step_validation(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle validation (2fa) step."""
if user_input:
if self._mode == "reauth":
return await self.async_step_reauth_validate(user_input)
return await self.async_step_user_validate(user_input)
previously_failed = VERIFICATION_CODE_KEY in self._user_auth_details
return self.async_show_form(
step_id="validation",
data_schema=vol.Schema(
{vol.Required(VERIFICATION_CODE_KEY): vol.All(str, vol.Strip)}
),
errors={"base": "invalid_verification_code"} if previously_failed else None,
description_placeholders={
CONF_BRAND: self._user_auth_details[CONF_BRAND],
CONF_USERNAME: self._user_auth_details[CONF_USERNAME],
CONF_LOGIN_METHOD: self._user_auth_details[CONF_LOGIN_METHOD],
},
@ -122,49 +161,84 @@ class AugustConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
async def async_step_reauth_validate(self, user_input=None):
"""Handle reauth and validation."""
errors = {}
errors: dict[str, str] = {}
description_placeholders: dict[str, str] = {}
if user_input is not None:
result = await self._async_auth_or_validate(user_input, errors)
if result is not None:
return result
self._user_auth_details.update(user_input)
validate_result = await self._async_auth_or_validate()
description_placeholders = validate_result.description_placeholders
if validate_result.validation_required:
return await self.async_step_validation()
if not (errors := validate_result.errors):
return await self._async_update_or_create_entry(validate_result.info)
return self.async_show_form(
step_id="reauth_validate",
data_schema=vol.Schema(
{
vol.Required(
CONF_BRAND,
default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND),
): vol.In(BRANDS),
vol.Required(CONF_PASSWORD): str,
}
),
errors=errors,
description_placeholders={
description_placeholders=description_placeholders
| {
CONF_USERNAME: self._user_auth_details[CONF_USERNAME],
},
)
async def _async_auth_or_validate(self, user_input, errors):
self._user_auth_details.update(user_input)
await self._august_gateway.async_setup(self._user_auth_details)
async def _async_reset_access_token_cache_if_needed(
self, gateway: AugustGateway, username: str, access_token_cache_file: str | None
) -> None:
"""Reset the access token cache if needed."""
# We need to configure the access token cache file before we set up the gateway
# since we need to reset it if the brand changes BEFORE the gateway is set up
gateway.async_configure_access_token_cache_file(
username, access_token_cache_file
)
if self._needs_reset:
self._needs_reset = False
await self._august_gateway.async_reset_authentication()
await gateway.async_reset_authentication()
async def _async_auth_or_validate(self) -> ValidateResult:
"""Authenticate or validate."""
user_auth_details = self._user_auth_details
gateway = self._august_gateway
assert gateway is not None
await self._async_reset_access_token_cache_if_needed(
gateway,
user_auth_details[CONF_USERNAME],
user_auth_details.get(CONF_ACCESS_TOKEN_CACHE_FILE),
)
await gateway.async_setup(user_auth_details)
errors: dict[str, str] = {}
info: dict[str, Any] = {}
description_placeholders: dict[str, str] = {}
validation_required = False
try:
info = await async_validate_input(
self._user_auth_details,
self._august_gateway,
)
info = await async_validate_input(user_auth_details, gateway)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except RequireValidation:
return await self.async_step_validation()
except Exception: # pylint: disable=broad-except
validation_required = True
except Exception as ex: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
errors["base"] = "unhandled"
description_placeholders = {"error": str(ex)}
if errors:
return None
return ValidateResult(
validation_required, info, errors, description_placeholders
)
async def _async_update_or_create_entry(self, info: dict[str, Any]) -> FlowResult:
"""Update existing entry or create a new one."""
existing_entry = await self.async_set_unique_id(
self._user_auth_details[CONF_USERNAME]
)
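Note: to make the control flow above easier to follow, here is a runnable sketch (outside Home Assistant) of how the steps consume the new ValidateResult instead of mutating a shared errors dict. The handle() helper is illustrative only; the dataclass fields match the diff.

from dataclasses import dataclass
from typing import Any


@dataclass
class ValidateResult:
    """Result from validation."""

    validation_required: bool
    info: dict[str, Any]
    errors: dict[str, str]
    description_placeholders: dict[str, str]


def handle(result: ValidateResult) -> str:
    """Mirror the step logic: 2FA first, then errors, then entry create/update."""
    if result.validation_required:
        return "show the 2FA validation form"
    if result.errors:
        return f"re-show the form with errors {result.errors}"
    return f"create or update the entry from {result.info}"


print(handle(ValidateResult(False, {"username": "me@example.com"}, {}, {})))
print(handle(ValidateResult(True, {}, {}, {})))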

View File

@ -7,6 +7,7 @@ from homeassistant.const import Platform
DEFAULT_TIMEOUT = 25
CONF_ACCESS_TOKEN_CACHE_FILE = "access_token_cache_file"
CONF_BRAND = "brand"
CONF_LOGIN_METHOD = "login_method"
CONF_INSTALL_ID = "install_id"
@ -42,6 +43,7 @@ MIN_TIME_BETWEEN_DETAIL_UPDATES = timedelta(hours=1)
ACTIVITY_UPDATE_INTERVAL = timedelta(seconds=10)
LOGIN_METHODS = ["phone", "email"]
DEFAULT_LOGIN_METHOD = "email"
PLATFORMS = [
Platform.BUTTON,

View File

@ -3,12 +3,14 @@ from __future__ import annotations
from typing import Any
from yalexs.const import DEFAULT_BRAND
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from . import AugustData
from .const import DOMAIN
from .const import CONF_BRAND, DOMAIN
TO_REDACT = {
"HouseID",
@ -44,4 +46,5 @@ async def async_get_config_entry_diagnostics(
)
for doorbell in data.doorbells
},
"brand": entry.data.get(CONF_BRAND, DEFAULT_BRAND),
}

View File

@ -3,6 +3,7 @@ from abc import abstractmethod
from yalexs.doorbell import Doorbell
from yalexs.lock import Lock
from yalexs.util import get_configuration_url
from homeassistant.core import callback
from homeassistant.helpers.entity import DeviceInfo, Entity
@ -30,7 +31,7 @@ class AugustEntityMixin(Entity):
name=device.device_name,
sw_version=self._detail.firmware_version,
suggested_area=_remove_device_types(device.device_name, DEVICE_TYPES),
configuration_url="https://account.august.com",
configuration_url=get_configuration_url(data.brand),
)
@property

View File

@ -1,19 +1,26 @@
"""Handle August connection setup and authentication."""
import asyncio
from collections.abc import Mapping
from http import HTTPStatus
import logging
import os
from typing import Any
from aiohttp import ClientError, ClientResponseError
from yalexs.api_async import ApiAsync
from yalexs.authenticator_async import AuthenticationState, AuthenticatorAsync
from yalexs.authenticator_common import Authentication
from yalexs.const import DEFAULT_BRAND
from yalexs.exceptions import AugustApiAIOHTTPError
from homeassistant.const import CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import aiohttp_client
from .const import (
CONF_ACCESS_TOKEN_CACHE_FILE,
CONF_BRAND,
CONF_INSTALL_ID,
CONF_LOGIN_METHOD,
DEFAULT_AUGUST_CONFIG_FILE,
@ -28,48 +35,59 @@ _LOGGER = logging.getLogger(__name__)
class AugustGateway:
"""Handle the connection to August."""
def __init__(self, hass):
def __init__(self, hass: HomeAssistant) -> None:
"""Init the connection."""
# Create an aiohttp session instead of using the default one since the
# default one is likely to trigger august's WAF if another integration
# is also using Cloudflare
self._aiohttp_session = aiohttp_client.async_create_clientsession(hass)
self._token_refresh_lock = asyncio.Lock()
self._access_token_cache_file = None
self._hass = hass
self._config = None
self.api = None
self.authenticator = None
self.authentication = None
self._access_token_cache_file: str | None = None
self._hass: HomeAssistant = hass
self._config: Mapping[str, Any] | None = None
self.api: ApiAsync | None = None
self.authenticator: AuthenticatorAsync | None = None
self.authentication: Authentication | None = None
@property
def access_token(self):
"""Access token for the api."""
return self.authentication.access_token
def config_entry(self):
def config_entry(self) -> dict[str, Any]:
"""Config entry."""
assert self._config is not None
return {
CONF_BRAND: self._config.get(CONF_BRAND, DEFAULT_BRAND),
CONF_LOGIN_METHOD: self._config[CONF_LOGIN_METHOD],
CONF_USERNAME: self._config[CONF_USERNAME],
CONF_INSTALL_ID: self._config.get(CONF_INSTALL_ID),
CONF_ACCESS_TOKEN_CACHE_FILE: self._access_token_cache_file,
}
async def async_setup(self, conf):
@callback
def async_configure_access_token_cache_file(
self, username: str, access_token_cache_file: str | None
) -> str:
"""Configure the access token cache file."""
file = access_token_cache_file or f".{username}{DEFAULT_AUGUST_CONFIG_FILE}"
self._access_token_cache_file = file
return self._hass.config.path(file)
async def async_setup(self, conf: Mapping[str, Any]) -> None:
"""Create the api and authenticator objects."""
if conf.get(VERIFICATION_CODE_KEY):
return
self._access_token_cache_file = conf.get(
CONF_ACCESS_TOKEN_CACHE_FILE,
f".{conf[CONF_USERNAME]}{DEFAULT_AUGUST_CONFIG_FILE}",
access_token_cache_file_path = self.async_configure_access_token_cache_file(
conf[CONF_USERNAME], conf.get(CONF_ACCESS_TOKEN_CACHE_FILE)
)
self._config = conf
self.api = ApiAsync(
self._aiohttp_session,
timeout=self._config.get(CONF_TIMEOUT, DEFAULT_TIMEOUT),
brand=self._config.get(CONF_BRAND, DEFAULT_BRAND),
)
self.authenticator = AuthenticatorAsync(
@ -78,9 +96,7 @@ class AugustGateway:
self._config[CONF_USERNAME],
self._config.get(CONF_PASSWORD, ""),
install_id=self._config.get(CONF_INSTALL_ID),
access_token_cache_file=self._hass.config.path(
self._access_token_cache_file
),
access_token_cache_file=access_token_cache_file_path,
)
await self.authenticator.async_setup_authentication()
@ -95,6 +111,10 @@ class AugustGateway:
# authenticated because we can be authenticated
# but have no access
await self.api.async_get_operable_locks(self.access_token)
except AugustApiAIOHTTPError as ex:
if ex.auth_failed:
raise InvalidAuth from ex
raise CannotConnect from ex
except ClientResponseError as ex:
if ex.status == HTTPStatus.UNAUTHORIZED:
raise InvalidAuth from ex
@ -122,8 +142,9 @@ class AugustGateway:
def _reset_authentication(self):
"""Remove the cache file."""
if os.path.exists(self._access_token_cache_file):
os.unlink(self._access_token_cache_file)
path = self._hass.config.path(self._access_token_cache_file)
if os.path.exists(path):
os.unlink(path)
async def async_refresh_access_token_if_needed(self):
"""Refresh the august access token if needed."""

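Note: a small sketch of the cache-file handling introduced above. The file name is derived from the username unless one is already stored, and the full path is always resolved against the config directory. The default file suffix and the paths here are placeholders.

import os

DEFAULT_AUGUST_CONFIG_FILE = "august.conf"  # placeholder value


def configure_access_token_cache_file(
    config_dir: str, username: str, access_token_cache_file: str | None
) -> str:
    """Pick the cache file name, then resolve it under the config directory."""
    file = access_token_cache_file or f".{username}{DEFAULT_AUGUST_CONFIG_FILE}"
    return os.path.join(config_dir, file)


# Reusing a stored file name keeps the token cache stable across brand-aware reauths.
print(configure_access_token_cache_file("/config", "me@example.com", None))
print(configure_access_token_cache_file("/config", "me@example.com", ".existing.conf"))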
View File

@ -47,6 +47,7 @@ class AugustLock(AugustEntityMixin, RestoreEntity, LockEntity):
async def async_lock(self, **kwargs: Any) -> None:
"""Lock the device."""
assert self._data.activity_stream is not None
if self._data.activity_stream.pubnub.connected:
await self._data.async_lock_async(self._device_id, self._hyper_bridge)
return
@ -54,6 +55,7 @@ class AugustLock(AugustEntityMixin, RestoreEntity, LockEntity):
async def async_unlock(self, **kwargs: Any) -> None:
"""Unlock the device."""
assert self._data.activity_stream is not None
if self._data.activity_stream.pubnub.connected:
await self._data.async_unlock_async(self._device_id, self._hyper_bridge)
return

View File

@ -28,5 +28,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==1.3.3", "yalexs-ble==2.1.16"]
"requirements": ["yalexs==1.5.1", "yalexs-ble==2.1.17"]
}

View File

@ -1,7 +1,8 @@
{
"config": {
"error": {
"unknown": "[%key:common::config_flow::error::unknown%]",
"unhandled": "Unhandled error: {error}",
"invalid_verification_code": "Invalid verification code",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
},
@ -15,20 +16,22 @@
"data": {
"code": "Verification code"
},
"description": "Please check your {login_method} ({username}) and enter the verification code below"
"description": "Please check your {login_method} ({username}) and enter the verification code below. Codes may take a few minutes to arrive."
},
"user_validate": {
"description": "If the Login Method is 'email', Username is the email address. If the Login Method is 'phone', Username is the phone number in the format '+NNNNNNNNN'.",
"description": "It is recommended to use the 'email' login method as some brands may not work with the 'phone' method. If the Login Method is 'email', Username is the email address. If the Login Method is 'phone', Username is the phone number in the format '+NNNNNNNNN'. If you choose the wrong brand, you may be able to authenticate initially; however, you will not be able to operate devices. If you are unsure of the brand, create the integration again and try another brand.",
"data": {
"password": "[%key:common::config_flow::data::password%]",
"brand": "Brand",
"login_method": "Login Method",
"username": "[%key:common::config_flow::data::username%]",
"login_method": "Login Method"
"password": "[%key:common::config_flow::data::password%]"
},
"title": "Set up an August account"
},
"reauth_validate": {
"description": "Enter the password for {username}.",
"description": "Choose the correct brand for your device, and enter the password for {username}. If you choose the wrong brand, you may be able to authenticate initially; however, you will not be able to operate devices. If you are unsure of the brand, create the integration again and try another brand.",
"data": {
"brand": "[%key:component::august::config::step::user_validate::data::brand%]",
"password": "[%key:common::config_flow::data::password%]"
},
"title": "Reauthenticate an August account"

View File

@ -15,7 +15,7 @@ from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from . import DOMAIN
from .const import CONF_ALLOWED_REGIONS, CONF_READ_ONLY, CONF_REFRESH_TOKEN
from .const import CONF_ALLOWED_REGIONS, CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN
DATA_SCHEMA = vol.Schema(
{
@ -48,6 +48,8 @@ async def validate_input(
retval = {"title": f"{data[CONF_USERNAME]}{data.get(CONF_SOURCE, '')}"}
if auth.refresh_token:
retval[CONF_REFRESH_TOKEN] = auth.refresh_token
if auth.gcid:
retval[CONF_GCID] = auth.gcid
return retval
@ -77,6 +79,7 @@ class BMWConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
entry_data = {
**user_input,
CONF_REFRESH_TOKEN: info.get(CONF_REFRESH_TOKEN),
CONF_GCID: info.get(CONF_GCID),
}
except CannotConnect:
errors["base"] = "cannot_connect"

View File

@ -11,6 +11,7 @@ CONF_ALLOWED_REGIONS = ["china", "north_america", "rest_of_world"]
CONF_READ_ONLY = "read_only"
CONF_ACCOUNT = "account"
CONF_REFRESH_TOKEN = "refresh_token"
CONF_GCID = "gcid"
DATA_HASS_CONFIG = "hass_config"

View File

@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import CONF_READ_ONLY, CONF_REFRESH_TOKEN, DOMAIN
from .const import CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN, DOMAIN
DEFAULT_SCAN_INTERVAL_SECONDS = 300
SCAN_INTERVAL = timedelta(seconds=DEFAULT_SCAN_INTERVAL_SECONDS)
@ -41,7 +41,10 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]):
self._entry = entry
if CONF_REFRESH_TOKEN in entry.data:
self.account.set_refresh_token(entry.data[CONF_REFRESH_TOKEN])
self.account.set_refresh_token(
refresh_token=entry.data[CONF_REFRESH_TOKEN],
gcid=entry.data.get(CONF_GCID),
)
super().__init__(
hass,

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive",
"iot_class": "cloud_polling",
"loggers": ["bimmer_connected"],
"requirements": ["bimmer_connected==0.13.3"]
"requirements": ["bimmer_connected==0.13.5"]
}

View File

@ -8,7 +8,7 @@
"documentation": "https://www.home-assistant.io/integrations/dlna_dmr",
"iot_class": "local_push",
"loggers": ["async_upnp_client"],
"requirements": ["async-upnp-client==0.33.1", "getmac==0.8.2"],
"requirements": ["async-upnp-client==0.33.2", "getmac==0.8.2"],
"ssdp": [
{
"deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",

View File

@ -8,7 +8,7 @@
"documentation": "https://www.home-assistant.io/integrations/dlna_dms",
"iot_class": "local_polling",
"quality_scale": "platinum",
"requirements": ["async-upnp-client==0.33.1"],
"requirements": ["async-upnp-client==0.33.2"],
"ssdp": [
{
"deviceType": "urn:schemas-upnp-org:device:MediaServer:1",

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/glances",
"iot_class": "local_polling",
"loggers": ["glances_api"],
"requirements": ["glances_api==0.4.1"]
"requirements": ["glances_api==0.4.2"]
}

View File

@ -48,7 +48,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
try:
response = await async_get_clientsession(hass).get(
f"https://alerts.home-assistant.io/alerts/{alert.alert_id}.json",
timeout=aiohttp.ClientTimeout(total=10),
timeout=aiohttp.ClientTimeout(total=30),
)
except asyncio.TimeoutError:
_LOGGER.warning("Error fetching %s: timeout", alert.filename)

View File

@ -201,7 +201,9 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
raise UpdateFailed(
f"Invalid response for search '{self.config_entry.data[CONF_SEARCH]}': {result} / {lines[0]}"
)
count: int = len(message_ids := lines[0].split())
if not (count := len(message_ids := lines[0].split())):
self._last_message_id = None
return 0
last_message_id = (
str(message_ids[-1:][0], encoding=self.config_entry.data[CONF_CHARSET])
if count
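Note: the early return added above avoids indexing into an empty SEARCH result. A standalone sketch of the same parsing, with the coordinator stripped away:

def parse_search_line(line: bytes, charset: str = "utf-8") -> tuple[int, str | None]:
    """Return (message count, last message id) for one IMAP SEARCH response line."""
    if not (count := len(message_ids := line.split())):
        # No matches: report zero instead of slicing an empty list.
        return 0, None
    return count, str(message_ids[-1], encoding=charset)


assert parse_search_line(b"") == (0, None)
assert parse_search_line(b"101 102 103") == (3, "103")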

View File

@ -197,25 +197,23 @@ class IntegrationSensor(RestoreEntity, SensorEntity):
old_state: State | None = event.data.get("old_state")
new_state: State | None = event.data.get("new_state")
if (
source_state := self.hass.states.get(self._sensor_source_id)
) is None or source_state.state == STATE_UNAVAILABLE:
self._attr_available = False
self.async_write_ha_state()
return
self._attr_available = True
if new_state is None or new_state.state in (
STATE_UNKNOWN,
STATE_UNAVAILABLE,
):
return
# We may want to update our state before an early return,
# based on the source sensor's unit_of_measurement
# or device_class.
update_state = False
if (
source_state := self.hass.states.get(self._sensor_source_id)
) is None or source_state.state == STATE_UNAVAILABLE:
self._attr_available = False
update_state = True
else:
self._attr_available = True
if old_state is None or new_state is None:
# we can't calculate the elapsed time, so we can't calculate the integral
return
unit = new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
if unit is not None:
new_unit_of_measurement = self._unit(unit)
@ -235,31 +233,53 @@ class IntegrationSensor(RestoreEntity, SensorEntity):
if update_state:
self.async_write_ha_state()
if old_state is None or old_state.state in (
STATE_UNKNOWN,
STATE_UNAVAILABLE,
):
return
try:
# integration as the Riemann integral of previous measures.
area = Decimal(0)
elapsed_time = (
new_state.last_updated - old_state.last_updated
).total_seconds()
if self._method == METHOD_TRAPEZOIDAL:
if (
self._method == METHOD_TRAPEZOIDAL
and new_state.state
not in (
STATE_UNKNOWN,
STATE_UNAVAILABLE,
)
and old_state.state
not in (
STATE_UNKNOWN,
STATE_UNAVAILABLE,
)
):
area = (
(Decimal(new_state.state) + Decimal(old_state.state))
* Decimal(elapsed_time)
/ 2
)
elif self._method == METHOD_LEFT:
elif self._method == METHOD_LEFT and old_state.state not in (
STATE_UNKNOWN,
STATE_UNAVAILABLE,
):
area = Decimal(old_state.state) * Decimal(elapsed_time)
elif self._method == METHOD_RIGHT:
elif self._method == METHOD_RIGHT and new_state.state not in (
STATE_UNKNOWN,
STATE_UNAVAILABLE,
):
area = Decimal(new_state.state) * Decimal(elapsed_time)
else:
_LOGGER.debug(
"Could not apply method %s to %s -> %s",
self._method,
old_state.state,
new_state.state,
)
return
integral = area / (self._unit_prefix * self._unit_time)
_LOGGER.debug(
"area = %s, integral = %s state = %s", area, integral, self._state
)
assert isinstance(integral, Decimal)
except ValueError as err:
_LOGGER.warning("While calculating integration: %s", err)
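Note: for reference, a self-contained sketch of the three Riemann approximations guarded above. States are strings, as Home Assistant stores them, and unusable states contribute zero area instead of raising.

from decimal import Decimal

STATE_UNKNOWN = "unknown"
STATE_UNAVAILABLE = "unavailable"
BAD_STATES = (STATE_UNKNOWN, STATE_UNAVAILABLE)


def riemann_area(method: str, old_state: str, new_state: str, elapsed_s: int) -> Decimal:
    """Area contribution of one update interval under the chosen method."""
    dt = Decimal(elapsed_s)
    if method == "trapezoidal" and old_state not in BAD_STATES and new_state not in BAD_STATES:
        return (Decimal(new_state) + Decimal(old_state)) * dt / 2
    if method == "left" and old_state not in BAD_STATES:
        return Decimal(old_state) * dt
    if method == "right" and new_state not in BAD_STATES:
        return Decimal(new_state) * dt
    return Decimal(0)  # method cannot be applied to these states


assert riemann_area("trapezoidal", "2.0", "4.0", 10) == Decimal(30)
assert riemann_area("left", "unknown", "4.0", 10) == Decimal(0)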

View File

@ -104,7 +104,7 @@ DISCOVERY_SCHEMAS = [
device_class=BinarySensorDeviceClass.BATTERY,
name="Battery Status",
measurement_to_ha=lambda x: x
!= clusters.PowerSource.Enums.BatChargeLevel.kOk,
!= clusters.PowerSource.Enums.BatChargeLevelEnum.kOk,
),
entity_class=MatterBinarySensor,
required_attributes=(clusters.PowerSource.Attributes.BatChargeLevel,),

View File

@ -1,7 +1,6 @@
"""Matter light."""
from __future__ import annotations
from enum import IntFlag
from typing import Any
from chip.clusters import Objects as clusters
@ -112,7 +111,7 @@ class MatterLight(MatterEntity, LightEntity):
await self.send_device_command(
clusters.ColorControl.Commands.MoveToColorTemperature(
colorTemperature=color_temp,
colorTemperatureMireds=color_temp,
# It's required in TLV. We don't implement transition time yet.
transitionTime=0,
)
@ -307,13 +306,22 @@ class MatterLight(MatterEntity, LightEntity):
assert capabilities is not None
if capabilities & ColorCapabilities.kHueSaturationSupported:
if (
capabilities
& clusters.ColorControl.Bitmaps.ColorCapabilities.kHueSaturationSupported
):
supported_color_modes.add(ColorMode.HS)
if capabilities & ColorCapabilities.kXYAttributesSupported:
if (
capabilities
& clusters.ColorControl.Bitmaps.ColorCapabilities.kXYAttributesSupported
):
supported_color_modes.add(ColorMode.XY)
if capabilities & ColorCapabilities.kColorTemperatureSupported:
if (
capabilities
& clusters.ColorControl.Bitmaps.ColorCapabilities.kColorTemperatureSupported
):
supported_color_modes.add(ColorMode.COLOR_TEMP)
self._attr_supported_color_modes = supported_color_modes
@ -344,18 +352,6 @@ class MatterLight(MatterEntity, LightEntity):
self._attr_brightness = self._get_brightness()
# This enum should be removed once the ColorControlCapabilities enum is added to the CHIP (Matter) library
# clusters.ColorControl.Bitmap.ColorCapabilities
class ColorCapabilities(IntFlag):
"""Color control capabilities bitmap."""
kHueSaturationSupported = 0x1
kEnhancedHueSupported = 0x2
kColorLoopSupported = 0x4
kXYAttributesSupported = 0x8
kColorTemperatureSupported = 0x10
# Discovery schema(s) to map Matter Attributes to HA entities
DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
@ -372,10 +368,11 @@ DISCOVERY_SCHEMAS = [
clusters.ColorControl.Attributes.CurrentY,
clusters.ColorControl.Attributes.ColorTemperatureMireds,
),
# restrict device type to prevent discovery by the wrong platform
not_device_type=(
device_types.OnOffPlugInUnit,
device_types.DoorLock,
device_type=(
device_types.ColorTemperatureLight,
device_types.DimmableLight,
device_types.ExtendedColorLight,
device_types.OnOffLight,
),
),
]
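Note: the removed helper enum above documents the capability bit values; the refactor now reads them from the CHIP library instead. A standalone sketch of the same bitmap test, using a local copy of the enum so it runs without the Matter SDK:

from enum import IntFlag


class ColorCapabilities(IntFlag):
    """Local copy of the color control capabilities bitmap, for illustration."""

    kHueSaturationSupported = 0x1
    kEnhancedHueSupported = 0x2
    kColorLoopSupported = 0x4
    kXYAttributesSupported = 0x8
    kColorTemperatureSupported = 0x10


def supported_color_modes(capabilities: int) -> set[str]:
    """Map the capability bits to Home Assistant color mode names."""
    modes: set[str] = set()
    if capabilities & ColorCapabilities.kHueSaturationSupported:
        modes.add("hs")
    if capabilities & ColorCapabilities.kXYAttributesSupported:
        modes.add("xy")
    if capabilities & ColorCapabilities.kColorTemperatureSupported:
        modes.add("color_temp")
    return modes


print(supported_color_modes(0x1 | 0x10))  # {'hs', 'color_temp'}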

View File

@ -106,7 +106,7 @@ class MatterLock(MatterEntity, LockEntity):
LOGGER.debug("Door state: %s for %s", door_state, self.entity_id)
self._attr_is_jammed = (
door_state is clusters.DoorLock.Enums.DlDoorState.kDoorJammed
door_state is clusters.DoorLock.Enums.DoorStateEnum.kDoorJammed
)

View File

@ -6,5 +6,5 @@
"dependencies": ["websocket_api"],
"documentation": "https://www.home-assistant.io/integrations/matter",
"iot_class": "local_push",
"requirements": ["python-matter-server==3.2.0"]
"requirements": ["python-matter-server==3.4.1"]
}

View File

@ -69,9 +69,14 @@ DISCOVERY_SCHEMAS = [
required_attributes=(clusters.OnOff.Attributes.OnOff,),
# restrict device type to prevent discovery by the wrong platform
not_device_type=(
device_types.OnOffLight,
device_types.ColorTemperatureLight,
device_types.DimmableLight,
device_types.ExtendedColorLight,
device_types.OnOffLight,
device_types.DoorLock,
device_types.ColorDimmerSwitch,
device_types.DimmerSwitch,
device_types.OnOffLightSwitch,
),
),
]

View File

@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["aionotion"],
"requirements": ["aionotion==2023.05.4"]
"requirements": ["aionotion==2023.05.5"]
}

View File

@ -195,9 +195,12 @@ class NWSSensor(CoordinatorEntity[NwsDataUpdateCoordinator], SensorEntity):
@property
def native_value(self) -> float | None:
"""Return the state."""
value = self._nws.observation.get(self.entity_description.key)
if value is None:
if (
not (observation := self._nws.observation)
or (value := observation.get(self.entity_description.key)) is None
):
return None
# Set alias to unit property -> prevent unnecessary hasattr calls
unit_of_measurement = self.native_unit_of_measurement
if unit_of_measurement == UnitOfSpeed.MILES_PER_HOUR:

View File

@ -5,6 +5,7 @@ import logging
from httpx import RequestError
from onvif.exceptions import ONVIFAuthError, ONVIFError, ONVIFTimeoutError
from onvif.util import is_auth_error, stringify_onvif_error
from zeep.exceptions import Fault, TransportError
from homeassistant.components.ffmpeg import CONF_EXTRA_ARGUMENTS
@ -21,7 +22,6 @@ from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from .const import CONF_SNAPSHOT_AUTH, DEFAULT_ARGUMENTS, DOMAIN
from .device import ONVIFDevice
from .util import is_auth_error, stringify_onvif_error
LOGGER = logging.getLogger(__name__)

View File

@ -6,6 +6,7 @@ from pprint import pformat
from typing import Any
from urllib.parse import urlparse
from onvif.util import is_auth_error, stringify_onvif_error
import voluptuous as vol
from wsdiscovery.discovery import ThreadedWSDiscovery as WSDiscovery
from wsdiscovery.scope import Scope
@ -40,7 +41,6 @@ from .const import (
LOGGER,
)
from .device import get_device
from .util import is_auth_error, stringify_onvif_error
CONF_MANUAL_INPUT = "Manually configure ONVIF device"

View File

@ -195,7 +195,9 @@ class ONVIFDevice:
await device_mgmt.SetSystemDateAndTime(dt_param)
LOGGER.debug("%s: SetSystemDateAndTime: success", self.name)
return
except Fault:
# Some cameras don't support setting the timezone and will throw an IndexError
# if we try to set it. If we get an error, try again without the timezone.
except (IndexError, Fault):
if idx == timezone_max_idx:
raise
@ -280,7 +282,7 @@ class ONVIFDevice:
# Set Date and Time ourselves if Date and Time is set manually in the camera.
try:
await self.async_manually_set_date_and_time()
except (RequestError, TransportError):
except (RequestError, TransportError, IndexError, Fault):
LOGGER.warning("%s: Could not sync date/time on this camera", self.name)
async def async_get_device_info(self) -> DeviceInfo:

View File

@ -3,32 +3,30 @@ from __future__ import annotations
import asyncio
from collections.abc import Callable
from contextlib import suppress
import datetime as dt
from aiohttp.web import Request
from httpx import RemoteProtocolError, RequestError, TransportError
from onvif import ONVIFCamera, ONVIFService
from onvif.client import NotificationManager, retry_connection_error
from onvif import ONVIFCamera
from onvif.client import (
NotificationManager,
PullPointManager as ONVIFPullPointManager,
retry_connection_error,
)
from onvif.exceptions import ONVIFError
from onvif.util import stringify_onvif_error
from zeep.exceptions import Fault, ValidationError, XMLParseError
from homeassistant.components import webhook
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import (
CALLBACK_TYPE,
CoreState,
HassJob,
HomeAssistant,
callback,
)
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.network import NoURLAvailableError, get_url
from .const import DOMAIN, LOGGER
from .models import Event, PullPointManagerState, WebHookManagerState
from .parsers import PARSERS
from .util import stringify_onvif_error
# Topics in this list are ignored because we do not want to create
# entities for them.
@ -51,11 +49,6 @@ RENEW_ERRORS = (ONVIFError, RequestError, XMLParseError, *SUBSCRIPTION_ERRORS)
#
SUBSCRIPTION_TIME = dt.timedelta(minutes=10)
# SUBSCRIPTION_RELATIVE_TIME uses a relative time since the time on the camera
# is not reliable. We use 600 seconds (10 minutes) since some cameras cannot
# parse time in the format "PT10M" (10 minutes).
SUBSCRIPTION_RELATIVE_TIME = "PT600S"
# SUBSCRIPTION_RENEW_INTERVAL must be less than the
# overall timeout of 90 * SUBSCRIPTION_ATTEMPTS (2) = 180 seconds
#
@ -106,18 +99,13 @@ class EventManager:
or self.pullpoint_manager.state == PullPointManagerState.STARTED
)
@property
def has_listeners(self) -> bool:
"""Return if there are listeners."""
return bool(self._listeners)
@callback
def async_add_listener(self, update_callback: CALLBACK_TYPE) -> Callable[[], None]:
"""Listen for data updates."""
# This is the first listener, set up polling.
if not self._listeners:
self.pullpoint_manager.async_schedule_pull_messages()
# We always have to listen for events or we will never
# know which sensors to create. In practice we always have
# a listener anyway since binary_sensor and sensor will
# create a listener when they are created.
self._listeners.append(update_callback)
@callback
@ -133,9 +121,6 @@ class EventManager:
if update_callback in self._listeners:
self._listeners.remove(update_callback)
if not self._listeners:
self.pullpoint_manager.async_cancel_pull_messages()
async def async_start(self, try_pullpoint: bool, try_webhook: bool) -> bool:
"""Start polling events."""
# Always start pull point first, since it will populate the event list
@ -255,22 +240,14 @@ class PullPointManager:
self._hass = event_manager.hass
self._name = event_manager.name
self._pullpoint_subscription: ONVIFService = None
self._pullpoint_service: ONVIFService = None
self._pull_lock: asyncio.Lock = asyncio.Lock()
self._pullpoint_manager: ONVIFPullPointManager | None = None
self._cancel_pull_messages: CALLBACK_TYPE | None = None
self._cancel_pullpoint_renew: CALLBACK_TYPE | None = None
self._renew_lock: asyncio.Lock = asyncio.Lock()
self._renew_or_restart_job = HassJob(
self._async_renew_or_restart_pullpoint,
f"{self._name}: renew or restart pullpoint",
)
self._pull_messages_job = HassJob(
self._async_background_pull_messages,
self._async_background_pull_messages_or_reschedule,
f"{self._name}: pull messages",
)
self._pull_messages_task: asyncio.Task[None] | None = None
async def async_start(self) -> bool:
"""Start pullpoint subscription."""
@ -282,6 +259,7 @@ class PullPointManager:
self.state = PullPointManagerState.FAILED
return False
self.state = PullPointManagerState.STARTED
self.async_schedule_pull_messages()
return True
@callback
@ -291,8 +269,9 @@ class PullPointManager:
self.state = PullPointManagerState.PAUSED
# Cancel the renew job so we don't renew the subscription
# and stop pulling messages.
self._async_cancel_pullpoint_renew()
self.async_cancel_pull_messages()
if self._pullpoint_manager:
self._pullpoint_manager.pause()
# We do not unsubscribe from the pullpoint subscription and instead
# let the subscription expire since some cameras will terminate all
# subscriptions if we unsubscribe which will break the webhook.
@ -302,17 +281,149 @@ class PullPointManager:
"""Resume pullpoint subscription."""
LOGGER.debug("%s: Resuming PullPoint manager", self._name)
self.state = PullPointManagerState.STARTED
self.async_schedule_pullpoint_renew(0.0)
if self._pullpoint_manager:
self._pullpoint_manager.resume()
self.async_schedule_pull_messages()
@callback
def async_schedule_pullpoint_renew(self, delay: float) -> None:
"""Schedule PullPoint subscription renewal."""
self._async_cancel_pullpoint_renew()
self._cancel_pullpoint_renew = async_call_later(
self._hass,
delay,
self._renew_or_restart_job,
async def async_stop(self) -> None:
"""Unsubscribe from PullPoint and cancel callbacks."""
self.state = PullPointManagerState.STOPPED
await self._async_cancel_and_unsubscribe()
async def _async_start_pullpoint(self) -> bool:
"""Start pullpoint subscription."""
try:
await self._async_create_pullpoint_subscription()
except CREATE_ERRORS as err:
LOGGER.debug(
"%s: Device does not support PullPoint service or has too many subscriptions: %s",
self._name,
stringify_onvif_error(err),
)
return False
return True
async def _async_cancel_and_unsubscribe(self) -> None:
"""Cancel and unsubscribe from PullPoint."""
self.async_cancel_pull_messages()
if self._pull_messages_task:
self._pull_messages_task.cancel()
await self._async_unsubscribe_pullpoint()
@retry_connection_error(SUBSCRIPTION_ATTEMPTS)
async def _async_create_pullpoint_subscription(self) -> None:
"""Create pullpoint subscription."""
self._pullpoint_manager = await self._device.create_pullpoint_manager(
SUBSCRIPTION_TIME, self._event_manager.async_mark_events_stale
)
await self._pullpoint_manager.set_synchronization_point()
async def _async_unsubscribe_pullpoint(self) -> None:
"""Unsubscribe the pullpoint subscription."""
if not self._pullpoint_manager or self._pullpoint_manager.closed:
return
LOGGER.debug("%s: Unsubscribing from PullPoint", self._name)
try:
await self._pullpoint_manager.shutdown()
except UNSUBSCRIBE_ERRORS as err:
LOGGER.debug(
(
"%s: Failed to unsubscribe PullPoint subscription;"
" This is normal if the device restarted: %s"
),
self._name,
stringify_onvif_error(err),
)
self._pullpoint_manager = None
async def _async_pull_messages(self) -> None:
"""Pull messages from device."""
if self._pullpoint_manager is None:
return
service = self._pullpoint_manager.get_service()
LOGGER.debug(
"%s: Pulling PullPoint messages timeout=%s limit=%s",
self._name,
PULLPOINT_POLL_TIME,
PULLPOINT_MESSAGE_LIMIT,
)
next_pull_delay = None
response = None
try:
if self._hass.is_running:
response = await service.PullMessages(
{
"MessageLimit": PULLPOINT_MESSAGE_LIMIT,
"Timeout": PULLPOINT_POLL_TIME,
}
)
else:
LOGGER.debug(
"%s: PullPoint skipped because Home Assistant is not running yet",
self._name,
)
except RemoteProtocolError as err:
# Either a shutdown event or the camera closed the connection. Because
# http://datatracker.ietf.org/doc/html/rfc2616#section-8.1.4 allows the server
# to close the connection at any time, we treat this as normal. Some
# cameras may close the connection if there are no messages to pull.
LOGGER.debug(
"%s: PullPoint subscription encountered a remote protocol error "
"(this is normal for some cameras): %s",
self._name,
stringify_onvif_error(err),
)
except Fault as err:
# Device may not support subscriptions so log at debug level
# when we get an XMLParseError
LOGGER.debug(
"%s: Failed to fetch PullPoint subscription messages: %s",
self._name,
stringify_onvif_error(err),
)
# Treat errors as if the camera restarted. Assume that the pullpoint
# subscription is no longer valid.
self._pullpoint_manager.resume()
except (XMLParseError, RequestError, TimeoutError, TransportError) as err:
LOGGER.debug(
"%s: PullPoint subscription encountered an unexpected error and will be retried "
"(this is normal for some cameras): %s",
self._name,
stringify_onvif_error(err),
)
# Avoid renewing the subscription too often since it causes problems
# for some cameras, mainly the Tapo ones.
next_pull_delay = SUBSCRIPTION_RESTART_INTERVAL_ON_ERROR
finally:
self.async_schedule_pull_messages(next_pull_delay)
if self.state != PullPointManagerState.STARTED:
# If the webhook started working during the long poll,
# and we got paused, our data is stale and we should not process it.
LOGGER.debug(
"%s: PullPoint state is %s (likely due to working webhook), skipping PullPoint messages",
self._name,
self.state,
)
return
if not response:
return
# Parse response
event_manager = self._event_manager
if (notification_message := response.NotificationMessage) and (
number_of_events := len(notification_message)
):
LOGGER.debug(
"%s: continuous PullMessages: %s event(s)",
self._name,
number_of_events,
)
await event_manager.async_parse_messages(notification_message)
event_manager.async_callback_listeners()
else:
LOGGER.debug("%s: continuous PullMessages: no events", self._name)
@callback
def async_cancel_pull_messages(self) -> None:
@ -332,275 +443,29 @@ class PullPointManager:
self.async_cancel_pull_messages()
if self.state != PullPointManagerState.STARTED:
return
if self._pullpoint_service:
if self._pullpoint_manager:
when = delay if delay is not None else PULLPOINT_COOLDOWN_TIME
self._cancel_pull_messages = async_call_later(
self._hass, when, self._pull_messages_job
)
async def async_stop(self) -> None:
"""Unsubscribe from PullPoint and cancel callbacks."""
self.state = PullPointManagerState.STOPPED
await self._async_cancel_and_unsubscribe()
async def _async_start_pullpoint(self) -> bool:
"""Start pullpoint subscription."""
try:
started = await self._async_create_pullpoint_subscription()
except CREATE_ERRORS as err:
LOGGER.debug(
"%s: Device does not support PullPoint service or has too many subscriptions: %s",
self._name,
stringify_onvif_error(err),
)
return False
if started:
self.async_schedule_pullpoint_renew(SUBSCRIPTION_RENEW_INTERVAL)
return started
async def _async_cancel_and_unsubscribe(self) -> None:
"""Cancel and unsubscribe from PullPoint."""
self._async_cancel_pullpoint_renew()
self.async_cancel_pull_messages()
await self._async_unsubscribe_pullpoint()
async def _async_renew_or_restart_pullpoint(
self, now: dt.datetime | None = None
@callback
def _async_background_pull_messages_or_reschedule(
self, _now: dt.datetime | None = None
) -> None:
"""Renew or start pullpoint subscription."""
if self._hass.is_stopping or self.state != PullPointManagerState.STARTED:
return
if self._renew_lock.locked():
LOGGER.debug("%s: PullPoint renew already in progress", self._name)
# Renew is already running, another one will be
# scheduled when the current one is done if needed.
return
async with self._renew_lock:
next_attempt = SUBSCRIPTION_RESTART_INTERVAL_ON_ERROR
try:
if await self._async_renew_pullpoint():
next_attempt = SUBSCRIPTION_RENEW_INTERVAL
else:
await self._async_restart_pullpoint()
finally:
self.async_schedule_pullpoint_renew(next_attempt)
@retry_connection_error(SUBSCRIPTION_ATTEMPTS)
async def _async_create_pullpoint_subscription(self) -> bool:
"""Create pullpoint subscription."""
if not await self._device.create_pullpoint_subscription(
{"InitialTerminationTime": SUBSCRIPTION_RELATIVE_TIME}
):
LOGGER.debug("%s: Failed to create PullPoint subscription", self._name)
return False
# Create subscription manager
self._pullpoint_subscription = await self._device.create_subscription_service(
"PullPointSubscription"
)
# Create the service that will be used to pull messages from the device.
self._pullpoint_service = await self._device.create_pullpoint_service()
# Initialize events
with suppress(*SET_SYNCHRONIZATION_POINT_ERRORS):
sync_result = await self._pullpoint_service.SetSynchronizationPoint()
LOGGER.debug("%s: SetSynchronizationPoint: %s", self._name, sync_result)
# Always schedule an initial pull messages
self.async_schedule_pull_messages(0.0)
return True
@callback
def _async_cancel_pullpoint_renew(self) -> None:
"""Cancel the pullpoint renew task."""
if self._cancel_pullpoint_renew:
self._cancel_pullpoint_renew()
self._cancel_pullpoint_renew = None
async def _async_restart_pullpoint(self) -> bool:
"""Restart the subscription assuming the camera rebooted."""
self.async_cancel_pull_messages()
await self._async_unsubscribe_pullpoint()
restarted = await self._async_start_pullpoint()
if restarted and self._event_manager.has_listeners:
LOGGER.debug("%s: Restarted PullPoint subscription", self._name)
self.async_schedule_pull_messages(0.0)
return restarted
async def _async_unsubscribe_pullpoint(self) -> None:
"""Unsubscribe the pullpoint subscription."""
if (
not self._pullpoint_subscription
or self._pullpoint_subscription.transport.client.is_closed
):
return
LOGGER.debug("%s: Unsubscribing from PullPoint", self._name)
try:
await self._pullpoint_subscription.Unsubscribe()
except UNSUBSCRIBE_ERRORS as err:
LOGGER.debug(
(
"%s: Failed to unsubscribe PullPoint subscription;"
" This is normal if the device restarted: %s"
),
self._name,
stringify_onvif_error(err),
)
self._pullpoint_subscription = None
@retry_connection_error(SUBSCRIPTION_ATTEMPTS)
async def _async_call_pullpoint_subscription_renew(self) -> None:
"""Call PullPoint subscription Renew."""
await self._pullpoint_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
async def _async_renew_pullpoint(self) -> bool:
"""Renew the PullPoint subscription."""
if (
not self._pullpoint_subscription
or self._pullpoint_subscription.transport.client.is_closed
):
return False
try:
# The first time we renew, we may get a Fault error so we
# suppress it. The subscription will be restarted in
# async_restart later.
await self._async_call_pullpoint_subscription_renew()
LOGGER.debug("%s: Renewed PullPoint subscription", self._name)
return True
except RENEW_ERRORS as err:
self._event_manager.async_mark_events_stale()
LOGGER.debug(
"%s: Failed to renew PullPoint subscription; %s",
self._name,
stringify_onvif_error(err),
)
return False
async def _async_pull_messages_with_lock(self) -> bool:
"""Pull messages from device while holding the lock.
This function must not be called directly, it should only
be called from _async_pull_messages.
Returns True if the subscription is working.
Returns False if the subscription is not working and should be restarted.
"""
assert self._pull_lock.locked(), "Pull lock must be held"
assert self._pullpoint_service is not None, "PullPoint service does not exist"
event_manager = self._event_manager
LOGGER.debug(
"%s: Pulling PullPoint messages timeout=%s limit=%s",
self._name,
PULLPOINT_POLL_TIME,
PULLPOINT_MESSAGE_LIMIT,
)
try:
response = await self._pullpoint_service.PullMessages(
{
"MessageLimit": PULLPOINT_MESSAGE_LIMIT,
"Timeout": PULLPOINT_POLL_TIME,
}
)
except RemoteProtocolError as err:
# Either a shutdown event or the camera closed the connection. Because
# http://datatracker.ietf.org/doc/html/rfc2616#section-8.1.4 allows the server
# to close the connection at any time, we treat this as normal. Some
# cameras may close the connection if there are no messages to pull.
LOGGER.debug(
"%s: PullPoint subscription encountered a remote protocol error "
"(this is normal for some cameras): %s",
self._name,
stringify_onvif_error(err),
)
return True
except Fault as err:
# Device may not support subscriptions so log at debug level
# when we get an XMLParseError
LOGGER.debug(
"%s: Failed to fetch PullPoint subscription messages: %s",
self._name,
stringify_onvif_error(err),
)
# Treat errors as if the camera restarted. Assume that the pullpoint
# subscription is no longer valid.
return False
except (XMLParseError, RequestError, TimeoutError, TransportError) as err:
LOGGER.debug(
"%s: PullPoint subscription encountered an unexpected error and will be retried "
"(this is normal for some cameras): %s",
self._name,
stringify_onvif_error(err),
)
# Avoid renewing the subscription too often since it causes problems
# for some cameras, mainly the Tapo ones.
return True
if self.state != PullPointManagerState.STARTED:
# If the webhook started working during the long poll,
# and we got paused, our data is stale and we should not process it.
LOGGER.debug(
"%s: PullPoint is paused (likely due to working webhook), skipping PullPoint messages",
self._name,
)
return True
# Parse response
if (notification_message := response.NotificationMessage) and (
number_of_events := len(notification_message)
):
LOGGER.debug(
"%s: continuous PullMessages: %s event(s)",
self._name,
number_of_events,
)
await event_manager.async_parse_messages(notification_message)
event_manager.async_callback_listeners()
else:
LOGGER.debug("%s: continuous PullMessages: no events", self._name)
return True
@callback
def _async_background_pull_messages(self, _now: dt.datetime | None = None) -> None:
"""Pull messages from device in the background."""
self._cancel_pull_messages = None
self._hass.async_create_background_task(
if self._pull_messages_task and not self._pull_messages_task.done():
LOGGER.debug(
"%s: PullPoint message pull is already in process, skipping pull",
self._name,
)
self.async_schedule_pull_messages()
return
self._pull_messages_task = self._hass.async_create_background_task(
self._async_pull_messages(),
f"{self._name} background pull messages",
)
async def _async_pull_messages(self) -> None:
"""Pull messages from device."""
event_manager = self._event_manager
if self._pull_lock.locked():
# Pull messages if the lock is not already locked
# any pull will do, so we don't need to wait for the lock
LOGGER.debug(
"%s: PullPoint subscription is already locked, skipping pull",
self._name,
)
return
async with self._pull_lock:
# Before we pop out of the lock we always need to schedule the next pull
# or call async_schedule_pullpoint_renew if the pull fails so the pull
# loop continues.
try:
if self._hass.state == CoreState.running:
if not await self._async_pull_messages_with_lock():
self.async_schedule_pullpoint_renew(0.0)
return
finally:
if event_manager.has_listeners:
self.async_schedule_pull_messages()
class WebHookManager:
"""Manage ONVIF webhook subscriptions.
@ -617,21 +482,21 @@ class WebHookManager:
self._event_manager = event_manager
self._device = event_manager.device
self._hass = event_manager.hass
self._webhook_unique_id = f"{DOMAIN}_{event_manager.config_entry.entry_id}"
config_entry = event_manager.config_entry
self._old_webhook_unique_id = f"{DOMAIN}_{config_entry.entry_id}"
# Some cameras have a limit on the length of the webhook URL
# so we use a shorter unique ID for the webhook.
unique_id = config_entry.unique_id
assert unique_id is not None
webhook_id = format_mac(unique_id).replace(":", "").lower()
self._webhook_unique_id = f"{DOMAIN}{webhook_id}"
self._name = event_manager.name
self._webhook_url: str | None = None
self._webhook_subscription: ONVIFService | None = None
self._notification_manager: NotificationManager | None = None
self._cancel_webhook_renew: CALLBACK_TYPE | None = None
self._renew_lock = asyncio.Lock()
self._renew_or_restart_job = HassJob(
self._async_renew_or_restart_webhook,
f"{self._name}: renew or restart webhook",
)
async def async_start(self) -> bool:
"""Start polling events."""
LOGGER.debug("%s: Starting webhook manager", self._name)
@ -649,20 +514,9 @@ class WebHookManager:
async def async_stop(self) -> None:
"""Unsubscribe from events."""
self.state = WebHookManagerState.STOPPED
self._async_cancel_webhook_renew()
await self._async_unsubscribe_webhook()
self._async_unregister_webhook()
@callback
def _async_schedule_webhook_renew(self, delay: float) -> None:
"""Schedule webhook subscription renewal."""
self._async_cancel_webhook_renew()
self._cancel_webhook_renew = async_call_later(
self._hass,
delay,
self._renew_or_restart_job,
)
@retry_connection_error(SUBSCRIPTION_ATTEMPTS)
async def _async_create_webhook_subscription(self) -> None:
"""Create webhook subscription."""
@ -671,14 +525,12 @@ class WebHookManager:
self._name,
self._webhook_url,
)
self._notification_manager = self._device.create_notification_manager(
{
"InitialTerminationTime": SUBSCRIPTION_RELATIVE_TIME,
"ConsumerReference": {"Address": self._webhook_url},
}
)
try:
self._webhook_subscription = await self._notification_manager.setup()
self._notification_manager = await self._device.create_notification_manager(
address=self._webhook_url,
interval=SUBSCRIPTION_TIME,
subscription_lost_callback=self._event_manager.async_mark_events_stale,
)
except ValidationError as err:
# This should only happen if there is a problem with the webhook URL
# that is causing it to not be well formed.
@ -688,7 +540,7 @@ class WebHookManager:
err,
)
raise
await self._notification_manager.start()
await self._notification_manager.set_synchronization_point()
LOGGER.debug(
"%s: Webhook subscription created with URL: %s",
self._name,
@ -707,62 +559,8 @@ class WebHookManager:
stringify_onvif_error(err),
)
return False
self._async_schedule_webhook_renew(SUBSCRIPTION_RENEW_INTERVAL)
return True
async def _async_restart_webhook(self) -> bool:
"""Restart the webhook subscription assuming the camera rebooted."""
await self._async_unsubscribe_webhook()
return await self._async_start_webhook()
@retry_connection_error(SUBSCRIPTION_ATTEMPTS)
async def _async_call_webhook_subscription_renew(self) -> None:
"""Call PullPoint subscription Renew."""
assert self._webhook_subscription is not None
await self._webhook_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
async def _async_renew_webhook(self) -> bool:
"""Renew webhook subscription."""
if (
not self._webhook_subscription
or self._webhook_subscription.transport.client.is_closed
):
return False
try:
await self._async_call_webhook_subscription_renew()
LOGGER.debug("%s: Renewed Webhook subscription", self._name)
return True
except RENEW_ERRORS as err:
self._event_manager.async_mark_events_stale()
LOGGER.debug(
"%s: Failed to renew webhook subscription %s",
self._name,
stringify_onvif_error(err),
)
return False
async def _async_renew_or_restart_webhook(
self, now: dt.datetime | None = None
) -> None:
"""Renew or start webhook subscription."""
if self._hass.is_stopping or self.state != WebHookManagerState.STARTED:
return
if self._renew_lock.locked():
LOGGER.debug("%s: Webhook renew already in progress", self._name)
# Renew is already running, another one will be
# scheduled when the current one is done if needed.
return
async with self._renew_lock:
next_attempt = SUBSCRIPTION_RESTART_INTERVAL_ON_ERROR
try:
if await self._async_renew_webhook():
next_attempt = SUBSCRIPTION_RENEW_INTERVAL
else:
await self._async_restart_webhook()
finally:
self._async_schedule_webhook_renew(next_attempt)
@callback
def _async_register_webhook(self) -> None:
"""Register the webhook for motion events."""
@ -791,6 +589,7 @@ class WebHookManager:
LOGGER.debug(
"%s: Unregistering webhook %s", self._name, self._webhook_unique_id
)
webhook.async_unregister(self._hass, self._old_webhook_unique_id)
webhook.async_unregister(self._hass, self._webhook_unique_id)
self._webhook_url = None
@ -842,23 +641,13 @@ class WebHookManager:
await event_manager.async_parse_messages(result.NotificationMessage)
event_manager.async_callback_listeners()
@callback
def _async_cancel_webhook_renew(self) -> None:
"""Cancel the webhook renew task."""
if self._cancel_webhook_renew:
self._cancel_webhook_renew()
self._cancel_webhook_renew = None
async def _async_unsubscribe_webhook(self) -> None:
"""Unsubscribe from the webhook."""
if (
not self._webhook_subscription
or self._webhook_subscription.transport.client.is_closed
):
if not self._notification_manager or self._notification_manager.closed:
return
LOGGER.debug("%s: Unsubscribing from webhook", self._name)
try:
await self._webhook_subscription.Unsubscribe()
await self._notification_manager.shutdown()
except UNSUBSCRIBE_ERRORS as err:
LOGGER.debug(
(
@ -868,4 +657,4 @@ class WebHookManager:
self._name,
stringify_onvif_error(err),
)
self._webhook_subscription = None
self._notification_manager = None
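
The unsubscribe path above now only needs the NotificationManager's `closed` flag and `shutdown()` coroutine instead of poking at a raw subscription's transport. Below is a minimal teardown sketch using just those two members as they appear in this hunk; `manager` is a placeholder for whatever the library hands back, and the broad except stands in for the integration's UNSUBSCRIBE_ERRORS.

import logging

_LOGGER = logging.getLogger(__name__)


async def async_teardown_notifications(manager) -> None:
    """Shut down a notification manager if it is still open (sketch)."""
    if manager is None or manager.closed:
        # Never created, or the transport was already closed by the library.
        return
    try:
        await manager.shutdown()
    except Exception as err:  # the integration narrows this to UNSUBSCRIBE_ERRORS
        _LOGGER.debug("Failed to unsubscribe from webhook: %s", err)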

View File

@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/onvif",
"iot_class": "local_push",
"loggers": ["onvif", "wsdiscovery", "zeep"],
"requirements": ["onvif-zeep-async==2.1.1", "WSDiscovery==2.0.0"]
"requirements": ["onvif-zeep-async==3.1.7", "WSDiscovery==2.0.0"]
}

View File

@ -10,7 +10,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["regenmaschine"],
"requirements": ["regenmaschine==2022.11.0"],
"requirements": ["regenmaschine==2023.05.1"],
"zeroconf": [
{
"type": "_http._tcp.local.",

View File

@ -215,6 +215,7 @@ class Recorder(threading.Thread):
self.schema_version = 0
self._commits_without_expire = 0
self._event_session_has_pending_writes = False
self.recorder_runs_manager = RecorderRunsManager()
self.states_manager = StatesManager()
@ -322,7 +323,7 @@ class Recorder(threading.Thread):
if (
self._event_listener
and not self._database_lock_task
and self._event_session_has_pending_writes()
and self._event_session_has_pending_writes
):
self.queue_task(COMMIT_TASK)
@ -688,6 +689,11 @@ class Recorder(threading.Thread):
# anything goes wrong in the run loop
self._shutdown()
def _add_to_session(self, session: Session, obj: object) -> None:
"""Add an object to the session."""
self._event_session_has_pending_writes = True
session.add(obj)
def _run(self) -> None:
"""Start processing events to save."""
self.thread_id = threading.get_ident()
@ -1016,11 +1022,11 @@ class Recorder(threading.Thread):
else:
event_types = EventTypes(event_type=event.event_type)
event_type_manager.add_pending(event_types)
session.add(event_types)
self._add_to_session(session, event_types)
dbevent.event_type_rel = event_types
if not event.data:
session.add(dbevent)
self._add_to_session(session, dbevent)
return
event_data_manager = self.event_data_manager
@ -1042,10 +1048,10 @@ class Recorder(threading.Thread):
# No matching attributes found, save them in the DB
dbevent_data = EventData(shared_data=shared_data, hash=hash_)
event_data_manager.add_pending(dbevent_data)
session.add(dbevent_data)
self._add_to_session(session, dbevent_data)
dbevent.event_data_rel = dbevent_data
session.add(dbevent)
self._add_to_session(session, dbevent)
def _process_state_changed_event_into_session(self, event: Event) -> None:
"""Process a state_changed event into the session."""
@ -1090,7 +1096,7 @@ class Recorder(threading.Thread):
else:
states_meta = StatesMeta(entity_id=entity_id)
states_meta_manager.add_pending(states_meta)
session.add(states_meta)
self._add_to_session(session, states_meta)
dbstate.states_meta_rel = states_meta
# Map the event data to the StateAttributes table
@ -1115,10 +1121,10 @@ class Recorder(threading.Thread):
# No matching attributes found, save them in the DB
dbstate_attributes = StateAttributes(shared_attrs=shared_attrs, hash=hash_)
state_attributes_manager.add_pending(dbstate_attributes)
session.add(dbstate_attributes)
self._add_to_session(session, dbstate_attributes)
dbstate.state_attributes = dbstate_attributes
session.add(dbstate)
self._add_to_session(session, dbstate)
def _handle_database_error(self, err: Exception) -> bool:
"""Handle a database error that may result in moving away the corrupt db."""
@ -1130,14 +1136,9 @@ class Recorder(threading.Thread):
return True
return False
def _event_session_has_pending_writes(self) -> bool:
"""Return True if there are pending writes in the event session."""
session = self.event_session
return bool(session and (session.new or session.dirty))
def _commit_event_session_or_retry(self) -> None:
"""Commit the event session if there is work to do."""
if not self._event_session_has_pending_writes():
if not self._event_session_has_pending_writes:
return
tries = 1
while tries <= self.db_max_retries:
@ -1163,6 +1164,7 @@ class Recorder(threading.Thread):
self._commits_without_expire += 1
session.commit()
self._event_session_has_pending_writes = False
# We just committed the state attributes to the database
# and we now know the attributes_ids. We can save
# many selects for matching attributes by loading them
@ -1263,7 +1265,7 @@ class Recorder(threading.Thread):
async def async_block_till_done(self) -> None:
"""Async version of block_till_done."""
if self._queue.empty() and not self._event_session_has_pending_writes():
if self._queue.empty() and not self._event_session_has_pending_writes:
return
event = asyncio.Event()
self.queue_task(SynchronizeTask(event))
@ -1417,6 +1419,8 @@ class Recorder(threading.Thread):
if self.event_session is None:
return
if self.recorder_runs_manager.active:
# .end will add to the event session
self._event_session_has_pending_writes = True
self.recorder_runs_manager.end(self.event_session)
try:
self._commit_event_session_or_retry()
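
The recorder now flips a plain boolean in `_add_to_session` and clears it after a successful commit, rather than asking SQLAlchemy for `session.new`/`session.dirty` on every check. A self-contained sketch of that bookkeeping pattern, assuming only a SQLAlchemy Session; the class below is illustrative, not the recorder's API.

from sqlalchemy.orm import Session


class PendingWriteTracker:
    """Track whether a session has uncommitted writes without querying it (sketch)."""

    def __init__(self, session: Session) -> None:
        self._session = session
        self._has_pending_writes = False

    def add(self, obj: object) -> None:
        """Add an object and remember that a commit is now needed."""
        # Flip the flag before touching the session so a commit check that
        # races with this call never misses the pending write.
        self._has_pending_writes = True
        self._session.add(obj)

    def commit_if_needed(self) -> None:
        """Commit only when something was actually added."""
        if not self._has_pending_writes:
            return
        self._session.commit()
        self._has_pending_writes = False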

View File

@ -39,7 +39,7 @@
"samsungctl[websocket]==0.7.1",
"samsungtvws[async,encrypted]==2.5.0",
"wakeonlan==2.1.0",
"async-upnp-client==0.33.1"
"async-upnp-client==0.33.2"
],
"ssdp": [
{

View File

@ -87,7 +87,6 @@ SENSOR_DESCRIPTIONS: dict[tuple[Units, bool], SensorEntityDescription] = {
),
(Units.NONE, False): SensorEntityDescription(
key=f"{Units.NONE}_{False}",
state_class=SensorStateClass.MEASUREMENT,
),
}

View File

@ -9,5 +9,5 @@
"iot_class": "local_push",
"loggers": ["async_upnp_client"],
"quality_scale": "internal",
"requirements": ["async-upnp-client==0.33.1"]
"requirements": ["async-upnp-client==0.33.2"]
}

View File

@ -41,7 +41,7 @@
"iot_class": "local_push",
"loggers": ["pyunifiprotect", "unifi_discovery"],
"quality_scale": "platinum",
"requirements": ["pyunifiprotect==4.8.3", "unifi-discovery==1.1.7"],
"requirements": ["pyunifiprotect==4.9.0", "unifi-discovery==1.1.7"],
"ssdp": [
{
"manufacturer": "Ubiquiti Networks",

View File

@ -8,7 +8,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["async_upnp_client"],
"requirements": ["async-upnp-client==0.33.1", "getmac==0.8.2"],
"requirements": ["async-upnp-client==0.33.2", "getmac==0.8.2"],
"ssdp": [
{
"st": "urn:schemas-upnp-org:device:InternetGatewayDevice:1"

View File

@ -2,6 +2,7 @@
import asyncio
from datetime import timedelta
import logging
from typing import Any
import voluptuous as vol
from xiaomi_gateway import AsyncXiaomiGatewayMulticast, XiaomiGateway
@ -351,9 +352,13 @@ class XiaomiDevice(Entity):
return True
return False
def push_data(self, data: dict[str, Any], raw_data: dict[Any, Any]) -> None:
"""Push from Hub running in another thread."""
self.hass.loop.call_soon_threadsafe(self.async_push_data, data, raw_data)
@callback
def push_data(self, data, raw_data):
"""Push from Hub."""
def async_push_data(self, data: dict[str, Any], raw_data: dict[Any, Any]) -> None:
"""Push from Hub handled in the event loop."""
_LOGGER.debug("PUSH >> %s: %s", self, data)
was_unavailable = self._async_track_unavailable()
is_data = self.parse_data(data, raw_data)
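
The hub callback above is split so the thread that receives multicast data only schedules work, while the actual state handling runs inside the event loop. A small generic sketch of that handoff, independent of Home Assistant; the names are illustrative.

import asyncio
import threading


def start_hub_thread(loop: asyncio.AbstractEventLoop) -> None:
    """Simulate a hub thread that pushes data into the event loop."""

    def handle_in_loop(data: dict) -> None:
        # Runs inside the event loop, so it may safely touch loop-bound state.
        print("handled in loop:", data)

    def hub_worker() -> None:
        # call_soon_threadsafe is the supported way to schedule a callback
        # on the loop from a different thread.
        loop.call_soon_threadsafe(handle_in_loop, {"status": "on"})

    threading.Thread(target=hub_worker, daemon=True).start()


async def main() -> None:
    start_hub_thread(asyncio.get_running_loop())
    await asyncio.sleep(0.1)  # Give the scheduled callback time to run.


if __name__ == "__main__":
    asyncio.run(main())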

View File

@ -15,8 +15,8 @@ from yalexs_ble import (
from homeassistant.components import bluetooth
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ADDRESS, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.const import CONF_ADDRESS, EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from .const import CONF_KEY, CONF_LOCAL_NAME, CONF_SLOT, DEVICE_TIMEOUT, DOMAIN
@ -45,7 +45,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Update from a ble callback."""
push_lock.update_advertisement(service_info.device, service_info.advertisement)
entry.async_on_unload(await push_lock.start())
shutdown_callback: CALLBACK_TYPE | None = await push_lock.start()
@callback
def _async_shutdown(event: Event | None = None) -> None:
nonlocal shutdown_callback
if shutdown_callback:
shutdown_callback()
shutdown_callback = None
entry.async_on_unload(_async_shutdown)
# We may already have the advertisement, so check for it.
if service_info := async_find_existing_service_info(hass, local_name, address):
@ -97,6 +106,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry.async_on_unload(push_lock.register_callback(_async_state_changed))
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(_async_update_listener))
entry.async_on_unload(
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_shutdown)
)
return True
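
The setup above registers the same shutdown routine for both config entry unload and EVENT_HOMEASSISTANT_STOP, using a `nonlocal` guard so the push lock is only stopped once. A minimal sketch of that idempotent-callback pattern on its own; the lambda stands in for the cancel function returned by `push_lock.start()`.

from collections.abc import Callable


def make_idempotent_shutdown(cancel: Callable[[], None]) -> Callable[[], None]:
    """Wrap a cancel function so multiple triggers only stop things once (sketch)."""
    called = False

    def _shutdown() -> None:
        nonlocal called
        if called:
            return  # The other trigger already ran; nothing left to cancel.
        called = True
        cancel()

    return _shutdown


# Register the same wrapper for entry unload and for the stop event.
shutdown = make_idempotent_shutdown(lambda: print("push lock stopped"))
shutdown()  # First trigger performs the cancel.
shutdown()  # Second trigger is a no-op.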

View File

@ -12,5 +12,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/yalexs_ble",
"iot_class": "local_push",
"requirements": ["yalexs-ble==2.1.16"]
"requirements": ["yalexs-ble==2.1.17"]
}

View File

@ -17,7 +17,7 @@
"iot_class": "local_push",
"loggers": ["async_upnp_client", "yeelight"],
"quality_scale": "platinum",
"requirements": ["yeelight==0.7.10", "async-upnp-client==0.33.1"],
"requirements": ["yeelight==0.7.10", "async-upnp-client==0.33.2"],
"zeroconf": [
{
"type": "_miio._udp.local.",

View File

@ -64,8 +64,16 @@ class ZamgWeather(CoordinatorEntity, WeatherEntity):
def native_temperature(self) -> float | None:
"""Return the platform temperature."""
try:
return float(self.coordinator.data[self.station_id]["TL"]["data"])
except (KeyError, ValueError):
if (
value := self.coordinator.data[self.station_id]["TLAM"]["data"]
) is not None:
return float(value)
if (
value := self.coordinator.data[self.station_id]["TL"]["data"]
) is not None:
return float(value)
return None
except (KeyError, ValueError, TypeError):
return None
@property
@ -73,7 +81,7 @@ class ZamgWeather(CoordinatorEntity, WeatherEntity):
"""Return the pressure."""
try:
return float(self.coordinator.data[self.station_id]["P"]["data"])
except (KeyError, ValueError):
except (KeyError, ValueError, TypeError):
return None
@property
@ -81,21 +89,37 @@ class ZamgWeather(CoordinatorEntity, WeatherEntity):
"""Return the humidity."""
try:
return float(self.coordinator.data[self.station_id]["RFAM"]["data"])
except (KeyError, ValueError):
except (KeyError, ValueError, TypeError):
return None
@property
def native_wind_speed(self) -> float | None:
"""Return the wind speed."""
try:
return float(self.coordinator.data[self.station_id]["FFAM"]["data"])
except (KeyError, ValueError):
if (
value := self.coordinator.data[self.station_id]["FFAM"]["data"]
) is not None:
return float(value)
if (
value := self.coordinator.data[self.station_id]["FFX"]["data"]
) is not None:
return float(value)
return None
except (KeyError, ValueError, TypeError):
return None
@property
def wind_bearing(self) -> float | str | None:
def wind_bearing(self) -> float | None:
"""Return the wind bearing."""
try:
return self.coordinator.data[self.station_id]["DD"]["data"]
except (KeyError, ValueError):
if (
value := self.coordinator.data[self.station_id]["DD"]["data"]
) is not None:
return float(value)
if (
value := self.coordinator.data[self.station_id]["DDX"]["data"]
) is not None:
return float(value)
return None
except (KeyError, ValueError, TypeError):
return None
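
Each property above now prefers one station parameter and falls back to a second one (TLAM then TL, FFAM then FFX, DD then DDX), and treats missing keys, None values, and unparsable strings the same way. A small helper sketch of that lookup pattern; the payload shape mirrors the coordinator data used above, but the helper itself is illustrative.

def read_station_value(data: dict, station_id: str, *params: str) -> float | None:
    """Return the first parsable value among the given parameters (sketch)."""
    for param in params:
        try:
            value = data[station_id][param]["data"]
        except KeyError:
            continue
        if value is None:
            continue
        try:
            return float(value)
        except (TypeError, ValueError):
            continue
    return None


# Prefer "TLAM" and fall back to "TL", as native_temperature does above.
sample = {"11035": {"TLAM": {"data": None}, "TL": {"data": "21.3"}}}
assert read_station_value(sample, "11035", "TLAM", "TL") == 21.3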

View File

@ -976,19 +976,19 @@ def async_discover_single_value(
continue
# check device_class_basic
if not check_device_class(
if value.node.device_class and not check_device_class(
value.node.device_class.basic, schema.device_class_basic
):
continue
# check device_class_generic
if not check_device_class(
if value.node.device_class and not check_device_class(
value.node.device_class.generic, schema.device_class_generic
):
continue
# check device_class_specific
if not check_device_class(
if value.node.device_class and not check_device_class(
value.node.device_class.specific, schema.device_class_specific
):
continue

View File

@ -8,7 +8,7 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["zwave_js_server"],
"requirements": ["pyserial==3.5", "zwave-js-server-python==0.48.0"],
"requirements": ["pyserial==3.5", "zwave-js-server-python==0.48.1"],
"usb": [
{
"vid": "0658",

View File

@ -8,7 +8,7 @@ from .backports.enum import StrEnum
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2023
MINOR_VERSION: Final = 5
PATCH_VERSION: Final = "3"
PATCH_VERSION: Final = "4"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)

View File

@ -5,7 +5,7 @@ aiodiscover==1.4.16
aiohttp==3.8.4
aiohttp_cors==0.7.0
astral==2.2
async-upnp-client==0.33.1
async-upnp-client==0.33.2
async_timeout==4.0.2
atomicwrites-homeassistant==1.4.1
attrs==22.2.0
@ -27,7 +27,7 @@ hassil==1.0.6
home-assistant-bluetooth==1.10.0
home-assistant-frontend==20230503.3
home-assistant-intents==2023.4.26
httpx==0.24.0
httpx==0.24.1
ifaddr==0.1.7
janus==1.0.0
jinja2==3.1.2

View File

@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2023.5.3"
version = "2023.5.4"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
@ -34,7 +34,7 @@ dependencies = [
"ciso8601==2.3.0",
# When bumping httpx, please check the version pins of
# httpcore, anyio, and h11 in gen_requirements_all
"httpx==0.24.0",
"httpx==0.24.1",
"home-assistant-bluetooth==1.10.0",
"ifaddr==0.1.7",
"jinja2==3.1.2",

View File

@ -10,7 +10,7 @@ awesomeversion==22.9.0
bcrypt==4.0.1
certifi>=2021.5.30
ciso8601==2.3.0
httpx==0.24.0
httpx==0.24.1
home-assistant-bluetooth==1.10.0
ifaddr==0.1.7
jinja2==3.1.2

View File

@ -71,7 +71,7 @@ WSDiscovery==2.0.0
WazeRouteCalculator==0.14
# homeassistant.components.accuweather
accuweather==0.5.1
accuweather==0.5.2
# homeassistant.components.adax
adax==0.2.0
@ -223,7 +223,7 @@ aionanoleaf==0.2.1
aionotify==0.2.0
# homeassistant.components.notion
aionotion==2023.05.4
aionotion==2023.05.5
# homeassistant.components.oncue
aiooncue==0.3.4
@ -377,7 +377,7 @@ asterisk_mbox==0.5.0
# homeassistant.components.ssdp
# homeassistant.components.upnp
# homeassistant.components.yeelight
async-upnp-client==0.33.1
async-upnp-client==0.33.2
# homeassistant.components.supla
asyncpysupla==0.0.5
@ -431,7 +431,7 @@ beautifulsoup4==4.11.1
bellows==0.35.5
# homeassistant.components.bmw_connected_drive
bimmer_connected==0.13.3
bimmer_connected==0.13.5
# homeassistant.components.bizkaibus
bizkaibus==0.1.1
@ -796,7 +796,7 @@ gios==3.1.0
gitterpy==0.1.7
# homeassistant.components.glances
glances_api==0.4.1
glances_api==0.4.2
# homeassistant.components.goalzero
goalzero==0.2.1
@ -1264,7 +1264,7 @@ ondilo==0.2.0
onkyo-eiscp==1.2.7
# homeassistant.components.onvif
onvif-zeep-async==2.1.1
onvif-zeep-async==3.1.7
# homeassistant.components.opengarage
open-garage==0.2.0
@ -1512,7 +1512,7 @@ pyatmo==7.5.0
pyatome==0.1.1
# homeassistant.components.apple_tv
pyatv==0.10.3
pyatv==0.11.0
# homeassistant.components.aussie_broadband
pyaussiebb==0.0.15
@ -2080,7 +2080,7 @@ python-kasa==0.5.1
# python-lirc==1.2.3
# homeassistant.components.matter
python-matter-server==3.2.0
python-matter-server==3.4.1
# homeassistant.components.xiaomi_miio
python-miio==0.5.12
@ -2158,7 +2158,7 @@ pytrafikverket==0.2.3
pyudev==0.23.2
# homeassistant.components.unifiprotect
pyunifiprotect==4.8.3
pyunifiprotect==4.9.0
# homeassistant.components.uptimerobot
pyuptimerobot==22.2.0
@ -2236,7 +2236,7 @@ rapt-ble==0.1.0
raspyrfm-client==1.2.8
# homeassistant.components.rainmachine
regenmaschine==2022.11.0
regenmaschine==2023.05.1
# homeassistant.components.renault
renault-api==0.1.13
@ -2685,10 +2685,10 @@ yalesmartalarmclient==0.3.9
# homeassistant.components.august
# homeassistant.components.yalexs_ble
yalexs-ble==2.1.16
yalexs-ble==2.1.17
# homeassistant.components.august
yalexs==1.3.3
yalexs==1.5.1
# homeassistant.components.yeelight
yeelight==0.7.10
@ -2745,7 +2745,7 @@ zigpy==0.55.0
zm-py==0.5.2
# homeassistant.components.zwave_js
zwave-js-server-python==0.48.0
zwave-js-server-python==0.48.1
# homeassistant.components.zwave_me
zwave_me_ws==0.4.2

View File

@ -61,7 +61,7 @@ WSDiscovery==2.0.0
WazeRouteCalculator==0.14
# homeassistant.components.accuweather
accuweather==0.5.1
accuweather==0.5.2
# homeassistant.components.adax
adax==0.2.0
@ -204,7 +204,7 @@ aiomusiccast==0.14.8
aionanoleaf==0.2.1
# homeassistant.components.notion
aionotion==2023.05.4
aionotion==2023.05.5
# homeassistant.components.oncue
aiooncue==0.3.4
@ -337,7 +337,7 @@ arcam-fmj==1.3.0
# homeassistant.components.ssdp
# homeassistant.components.upnp
# homeassistant.components.yeelight
async-upnp-client==0.33.1
async-upnp-client==0.33.2
# homeassistant.components.sleepiq
asyncsleepiq==1.3.5
@ -364,7 +364,7 @@ beautifulsoup4==4.11.1
bellows==0.35.5
# homeassistant.components.bmw_connected_drive
bimmer_connected==0.13.3
bimmer_connected==0.13.5
# homeassistant.components.bluetooth
bleak-retry-connector==3.0.2
@ -615,7 +615,7 @@ getmac==0.8.2
gios==3.1.0
# homeassistant.components.glances
glances_api==0.4.1
glances_api==0.4.2
# homeassistant.components.goalzero
goalzero==0.2.1
@ -945,7 +945,7 @@ omnilogic==0.4.5
ondilo==0.2.0
# homeassistant.components.onvif
onvif-zeep-async==2.1.1
onvif-zeep-async==3.1.7
# homeassistant.components.opengarage
open-garage==0.2.0
@ -1115,7 +1115,7 @@ pyatag==0.3.5.3
pyatmo==7.5.0
# homeassistant.components.apple_tv
pyatv==0.10.3
pyatv==0.11.0
# homeassistant.components.aussie_broadband
pyaussiebb==0.0.15
@ -1497,7 +1497,7 @@ python-juicenet==1.1.0
python-kasa==0.5.1
# homeassistant.components.matter
python-matter-server==3.2.0
python-matter-server==3.4.1
# homeassistant.components.xiaomi_miio
python-miio==0.5.12
@ -1554,7 +1554,7 @@ pytrafikverket==0.2.3
pyudev==0.23.2
# homeassistant.components.unifiprotect
pyunifiprotect==4.8.3
pyunifiprotect==4.9.0
# homeassistant.components.uptimerobot
pyuptimerobot==22.2.0
@ -1605,7 +1605,7 @@ radiotherm==2.1.0
rapt-ble==0.1.0
# homeassistant.components.rainmachine
regenmaschine==2022.11.0
regenmaschine==2023.05.1
# homeassistant.components.renault
renault-api==0.1.13
@ -1940,10 +1940,10 @@ yalesmartalarmclient==0.3.9
# homeassistant.components.august
# homeassistant.components.yalexs_ble
yalexs-ble==2.1.16
yalexs-ble==2.1.17
# homeassistant.components.august
yalexs==1.3.3
yalexs==1.5.1
# homeassistant.components.yeelight
yeelight==0.7.10
@ -1982,7 +1982,7 @@ zigpy-znp==0.11.1
zigpy==0.55.0
# homeassistant.components.zwave_js
zwave-js-server-python==0.48.0
zwave-js-server-python==0.48.1
# homeassistant.components.zwave_me
zwave_me_ws==0.4.2

View File

@ -6,6 +6,7 @@ from yalexs.authenticator import ValidationResult
from homeassistant import config_entries
from homeassistant.components.august.const import (
CONF_ACCESS_TOKEN_CACHE_FILE,
CONF_BRAND,
CONF_INSTALL_ID,
CONF_LOGIN_METHOD,
DOMAIN,
@ -18,6 +19,7 @@ from homeassistant.components.august.exceptions import (
)
from homeassistant.const import CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from tests.common import MockConfigEntry
@ -28,7 +30,7 @@ async def test_form(hass: HomeAssistant) -> None:
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["type"] is FlowResultType.FORM
assert result["errors"] == {}
with patch(
@ -41,6 +43,7 @@ async def test_form(hass: HomeAssistant) -> None:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_BRAND: "august",
CONF_LOGIN_METHOD: "email",
CONF_USERNAME: "my@email.tld",
CONF_PASSWORD: "test-password",
@ -48,9 +51,10 @@ async def test_form(hass: HomeAssistant) -> None:
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["type"] is FlowResultType.CREATE_ENTRY
assert result2["title"] == "my@email.tld"
assert result2["data"] == {
CONF_BRAND: "august",
CONF_LOGIN_METHOD: "email",
CONF_USERNAME: "my@email.tld",
CONF_INSTALL_ID: None,
@ -72,13 +76,14 @@ async def test_form_invalid_auth(hass: HomeAssistant) -> None:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_BRAND: "august",
CONF_LOGIN_METHOD: "email",
CONF_USERNAME: "my@email.tld",
CONF_PASSWORD: "test-password",
},
)
assert result2["type"] == "form"
assert result2["type"] is FlowResultType.FORM
assert result2["errors"] == {"base": "invalid_auth"}
@ -90,19 +95,21 @@ async def test_user_unexpected_exception(hass: HomeAssistant) -> None:
with patch(
"homeassistant.components.august.config_flow.AugustGateway.async_authenticate",
side_effect=ValueError,
side_effect=ValueError("something exploded"),
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_BRAND: "august",
CONF_LOGIN_METHOD: "email",
CONF_USERNAME: "my@email.tld",
CONF_PASSWORD: "test-password",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "unknown"}
assert result2["type"] is FlowResultType.FORM
assert result2["errors"] == {"base": "unhandled"}
assert result2["description_placeholders"] == {"error": "something exploded"}
async def test_form_cannot_connect(hass: HomeAssistant) -> None:
@ -124,7 +131,7 @@ async def test_form_cannot_connect(hass: HomeAssistant) -> None:
},
)
assert result2["type"] == "form"
assert result2["type"] is FlowResultType.FORM
assert result2["errors"] == {"base": "cannot_connect"}
@ -151,7 +158,7 @@ async def test_form_needs_validate(hass: HomeAssistant) -> None:
)
assert len(mock_send_verification_code.mock_calls) == 1
assert result2["type"] == "form"
assert result2["type"] is FlowResultType.FORM
assert result2["errors"] is None
assert result2["step_id"] == "validation"
@ -165,9 +172,7 @@ async def test_form_needs_validate(hass: HomeAssistant) -> None:
) as mock_validate_verification_code, patch(
"homeassistant.components.august.gateway.AuthenticatorAsync.async_send_verification_code",
return_value=True,
) as mock_send_verification_code, patch(
"homeassistant.components.august.async_setup_entry", return_value=True
) as mock_setup_entry:
) as mock_send_verification_code:
result3 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{VERIFICATION_CODE_KEY: "incorrect"},
@ -177,8 +182,8 @@ async def test_form_needs_validate(hass: HomeAssistant) -> None:
# so they have a chance to retry
assert len(mock_send_verification_code.mock_calls) == 0
assert len(mock_validate_verification_code.mock_calls) == 1
assert result3["type"] == "form"
assert result3["errors"] is None
assert result3["type"] is FlowResultType.FORM
assert result3["errors"] == {"base": "invalid_verification_code"}
assert result3["step_id"] == "validation"
# Try with the CORRECT verification code and we setup
@ -202,9 +207,10 @@ async def test_form_needs_validate(hass: HomeAssistant) -> None:
assert len(mock_send_verification_code.mock_calls) == 0
assert len(mock_validate_verification_code.mock_calls) == 1
assert result4["type"] == "create_entry"
assert result4["type"] is FlowResultType.CREATE_ENTRY
assert result4["title"] == "my@email.tld"
assert result4["data"] == {
CONF_BRAND: "august",
CONF_LOGIN_METHOD: "email",
CONF_USERNAME: "my@email.tld",
CONF_INSTALL_ID: None,
@ -233,7 +239,7 @@ async def test_form_reauth(hass: HomeAssistant) -> None:
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data
)
assert result["type"] == "form"
assert result["type"] is FlowResultType.FORM
assert result["errors"] == {}
with patch(
@ -251,7 +257,7 @@ async def test_form_reauth(hass: HomeAssistant) -> None:
)
await hass.async_block_till_done()
assert result2["type"] == "abort"
assert result2["type"] is FlowResultType.ABORT
assert result2["reason"] == "reauth_successful"
assert len(mock_setup_entry.mock_calls) == 1
@ -276,7 +282,7 @@ async def test_form_reauth_with_2fa(hass: HomeAssistant) -> None:
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data
)
assert result["type"] == "form"
assert result["type"] is FlowResultType.FORM
assert result["errors"] == {}
with patch(
@ -295,7 +301,7 @@ async def test_form_reauth_with_2fa(hass: HomeAssistant) -> None:
await hass.async_block_till_done()
assert len(mock_send_verification_code.mock_calls) == 1
assert result2["type"] == "form"
assert result2["type"] is FlowResultType.FORM
assert result2["errors"] is None
assert result2["step_id"] == "validation"
@ -320,6 +326,52 @@ async def test_form_reauth_with_2fa(hass: HomeAssistant) -> None:
assert len(mock_validate_verification_code.mock_calls) == 1
assert len(mock_send_verification_code.mock_calls) == 0
assert result3["type"] == "abort"
assert result3["type"] is FlowResultType.ABORT
assert result3["reason"] == "reauth_successful"
assert len(mock_setup_entry.mock_calls) == 1
async def test_switching_brands(hass: HomeAssistant) -> None:
"""Test brands can be switched by setting up again."""
entry = MockConfigEntry(
domain=DOMAIN,
data={
CONF_LOGIN_METHOD: "email",
CONF_USERNAME: "my@email.tld",
CONF_PASSWORD: "test-password",
CONF_INSTALL_ID: None,
CONF_TIMEOUT: 10,
CONF_ACCESS_TOKEN_CACHE_FILE: ".my@email.tld.august.conf",
},
unique_id="my@email.tld",
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] is FlowResultType.FORM
assert result["errors"] == {}
with patch(
"homeassistant.components.august.config_flow.AugustGateway.async_authenticate",
return_value=True,
), patch(
"homeassistant.components.august.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_BRAND: "yale_home",
CONF_LOGIN_METHOD: "email",
CONF_USERNAME: "my@email.tld",
CONF_PASSWORD: "test-password",
},
)
await hass.async_block_till_done()
assert result2["type"] is FlowResultType.ABORT
assert result2["reason"] == "reauth_successful"
assert len(mock_setup_entry.mock_calls) == 1
assert entry.data[CONF_BRAND] == "yale_home"

View File

@ -141,4 +141,5 @@ async def test_diagnostics(
"zWaveEnabled": False,
}
},
"brand": "august",
}

View File

@ -77,12 +77,42 @@ async def test_august_is_offline(hass: HomeAssistant) -> None:
assert config_entry.state is ConfigEntryState.SETUP_RETRY
async def test_august_late_auth_failure(hass: HomeAssistant) -> None:
"""Test we can detect a late auth failure."""
aiohttp_client_response_exception = ClientResponseError(None, None, status=401)
config_entry = MockConfigEntry(
domain=DOMAIN,
data=_mock_get_config()[DOMAIN],
title="August august",
)
config_entry.add_to_hass(hass)
with patch(
"yalexs.authenticator_async.AuthenticatorAsync.async_authenticate",
side_effect=AugustApiAIOHTTPError(
"This should bubble up as its user consumable",
aiohttp_client_response_exception,
),
):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state is ConfigEntryState.SETUP_ERROR
flows = hass.config_entries.flow.async_progress()
assert flows[0]["step_id"] == "reauth_validate"
async def test_unlock_throws_august_api_http_error(hass: HomeAssistant) -> None:
"""Test unlock throws correct error on http error."""
mocked_lock_detail = await _mock_operative_august_lock_detail(hass)
aiohttp_client_response_exception = ClientResponseError(None, None, status=400)
def _unlock_return_activities_side_effect(access_token, device_id):
raise AugustApiAIOHTTPError("This should bubble up as its user consumable")
raise AugustApiAIOHTTPError(
"This should bubble up as its user consumable",
aiohttp_client_response_exception,
)
await _create_august_with_devices(
hass,
@ -106,9 +136,13 @@ async def test_unlock_throws_august_api_http_error(hass: HomeAssistant) -> None:
async def test_lock_throws_august_api_http_error(hass: HomeAssistant) -> None:
"""Test lock throws correct error on http error."""
mocked_lock_detail = await _mock_operative_august_lock_detail(hass)
aiohttp_client_response_exception = ClientResponseError(None, None, status=400)
def _lock_return_activities_side_effect(access_token, device_id):
raise AugustApiAIOHTTPError("This should bubble up as its user consumable")
raise AugustApiAIOHTTPError(
"This should bubble up as its user consumable",
aiohttp_client_response_exception,
)
await _create_august_with_devices(
hass,

View File

@ -13,6 +13,7 @@ import respx
from homeassistant import config_entries
from homeassistant.components.bmw_connected_drive.const import (
CONF_GCID,
CONF_READ_ONLY,
CONF_REFRESH_TOKEN,
DOMAIN as BMW_DOMAIN,
@ -33,6 +34,7 @@ FIXTURE_USER_INPUT = {
CONF_REGION: "rest_of_world",
}
FIXTURE_REFRESH_TOKEN = "SOME_REFRESH_TOKEN"
FIXTURE_GCID = "SOME_GCID"
FIXTURE_CONFIG_ENTRY = {
"entry_id": "1",
@ -43,6 +45,7 @@ FIXTURE_CONFIG_ENTRY = {
CONF_PASSWORD: FIXTURE_USER_INPUT[CONF_PASSWORD],
CONF_REGION: FIXTURE_USER_INPUT[CONF_REGION],
CONF_REFRESH_TOKEN: FIXTURE_REFRESH_TOKEN,
CONF_GCID: FIXTURE_GCID,
},
"options": {CONF_READ_ONLY: False},
"source": config_entries.SOURCE_USER,

View File

@ -2357,6 +2357,7 @@
}),
]),
'info': dict({
'gcid': 'SOME_GCID',
'password': '**REDACTED**',
'refresh_token': '**REDACTED**',
'region': 'rest_of_world',
@ -3860,6 +3861,7 @@
}),
]),
'info': dict({
'gcid': 'SOME_GCID',
'password': '**REDACTED**',
'refresh_token': '**REDACTED**',
'region': 'rest_of_world',
@ -4692,6 +4694,7 @@
}),
]),
'info': dict({
'gcid': 'SOME_GCID',
'password': '**REDACTED**',
'refresh_token': '**REDACTED**',
'region': 'rest_of_world',

View File

@ -14,7 +14,12 @@ from homeassistant.components.bmw_connected_drive.const import (
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from . import FIXTURE_CONFIG_ENTRY, FIXTURE_REFRESH_TOKEN, FIXTURE_USER_INPUT
from . import (
FIXTURE_CONFIG_ENTRY,
FIXTURE_GCID,
FIXTURE_REFRESH_TOKEN,
FIXTURE_USER_INPUT,
)
from tests.common import MockConfigEntry
@ -25,6 +30,7 @@ FIXTURE_IMPORT_ENTRY = {**FIXTURE_USER_INPUT, CONF_REFRESH_TOKEN: None}
def login_sideeffect(self: MyBMWAuthentication):
"""Mock logging in and setting a refresh token."""
self.refresh_token = FIXTURE_REFRESH_TOKEN
self.gcid = FIXTURE_GCID
async def test_show_form(hass: HomeAssistant) -> None:

View File

@ -15,6 +15,7 @@ from homeassistant.util.dt import utcnow
from .const import (
BAD_RESPONSE,
EMPTY_SEARCH_RESPONSE,
TEST_FETCH_RESPONSE_BINARY,
TEST_FETCH_RESPONSE_HTML,
TEST_FETCH_RESPONSE_INVALID_DATE,
@ -347,3 +348,101 @@ async def test_fetch_number_of_messages(
# we should have an entity with an unavailable state
assert state is not None
assert state.state == STATE_UNAVAILABLE
@pytest.mark.parametrize("imap_search", [TEST_SEARCH_RESPONSE])
@pytest.mark.parametrize(
("imap_fetch", "valid_date"),
[(TEST_FETCH_RESPONSE_TEXT_PLAIN, True)],
ids=["plain"],
)
@pytest.mark.parametrize("imap_has_capability", [True, False], ids=["push", "poll"])
async def test_reset_last_message(
hass: HomeAssistant, mock_imap_protocol: MagicMock, valid_date: bool
) -> None:
"""Test receiving a message successfully."""
event = asyncio.Event() # needed for pushed coordinator to make a new loop
async def _sleep_till_event() -> None:
"""Simulate imap server waiting for pushes message and keep the push loop going.
Needed for pushed coordinator only.
"""
nonlocal event
await event.wait()
event.clear()
mock_imap_protocol.idle_start.return_value = AsyncMock()()
# Make sure we make another cycle (needed for pushed coordinator)
mock_imap_protocol.idle_start.return_value = AsyncMock()()
# Mock we wait till we push an update (needed for pushed coordinator)
mock_imap_protocol.wait_server_push.side_effect = _sleep_till_event
event_called = async_capture_events(hass, "imap_content")
config_entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG)
config_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
# Make sure we have had one update (when polling)
async_fire_time_changed(hass, utcnow() + timedelta(seconds=5))
await hass.async_block_till_done()
state = hass.states.get("sensor.imap_email_email_com")
# We should have received one message
assert state is not None
assert state.state == "1"
# We should have received one event
assert len(event_called) == 1
data: dict[str, Any] = event_called[0].data
assert data["server"] == "imap.server.com"
assert data["username"] == "email@email.com"
assert data["search"] == "UnSeen UnDeleted"
assert data["folder"] == "INBOX"
assert data["sender"] == "john.doe@example.com"
assert data["subject"] == "Test subject"
assert data["text"]
assert (
valid_date
and isinstance(data["date"], datetime)
or not valid_date
and data["date"] is None
)
# Simulate an update where no messages are found (needed for pushed coordinator)
mock_imap_protocol.search.return_value = Response(*EMPTY_SEARCH_RESPONSE)
# Make sure we have an update
async_fire_time_changed(hass, utcnow() + timedelta(seconds=30))
# Awake loop (needed for pushed coordinator)
event.set()
await hass.async_block_till_done()
state = hass.states.get("sensor.imap_email_email_com")
# We should now have no messages
assert state is not None
assert state.state == "0"
# No new events should be called
assert len(event_called) == 1
# Simulate an update with the original message again
mock_imap_protocol.search.return_value = Response(*TEST_SEARCH_RESPONSE)
# Make sure we have an update again with the same UID
async_fire_time_changed(hass, utcnow() + timedelta(seconds=30))
# Awake loop (needed for pushed coordinator)
event.set()
await hass.async_block_till_done()
state = hass.states.get("sensor.imap_email_email_com")
# We should have received one message
assert state is not None
assert state.state == "1"
await hass.async_block_till_done()
await hass.async_block_till_done()
# One new event
assert len(event_called) == 2

View File

@ -2,6 +2,8 @@
from datetime import timedelta
from unittest.mock import patch
import pytest
from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
@ -20,7 +22,8 @@ import homeassistant.util.dt as dt_util
from tests.common import mock_restore_cache
async def test_state(hass: HomeAssistant) -> None:
@pytest.mark.parametrize("method", ["trapezoidal", "left", "right"])
async def test_state(hass: HomeAssistant, method) -> None:
"""Test integration sensor state."""
config = {
"sensor": {
@ -28,6 +31,7 @@ async def test_state(hass: HomeAssistant) -> None:
"name": "integration",
"source": "sensor.power",
"round": 2,
"method": method,
}
}
@ -46,8 +50,8 @@ async def test_state(hass: HomeAssistant) -> None:
assert state.attributes.get("state_class") is SensorStateClass.TOTAL
assert "device_class" not in state.attributes
future_now = dt_util.utcnow() + timedelta(seconds=3600)
with patch("homeassistant.util.dt.utcnow", return_value=future_now):
now += timedelta(seconds=3600)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(
entity_id,
1,
@ -69,6 +73,62 @@ async def test_state(hass: HomeAssistant) -> None:
assert state.attributes.get("device_class") == SensorDeviceClass.ENERGY
assert state.attributes.get("state_class") is SensorStateClass.TOTAL
# 1 hour after last update, power sensor is unavailable
now += timedelta(seconds=3600)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(
entity_id,
STATE_UNAVAILABLE,
{
"device_class": SensorDeviceClass.POWER,
ATTR_UNIT_OF_MEASUREMENT: UnitOfPower.KILO_WATT,
},
force_update=True,
)
await hass.async_block_till_done()
state = hass.states.get("sensor.integration")
assert state.state == STATE_UNAVAILABLE
# 1 hour after the last update, the power sensor is back to normal at 2 kW and stays there for 1 hour, adding 2 kWh
now += timedelta(seconds=3600)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(
entity_id,
2,
{
"device_class": SensorDeviceClass.POWER,
ATTR_UNIT_OF_MEASUREMENT: UnitOfPower.KILO_WATT,
},
force_update=True,
)
await hass.async_block_till_done()
state = hass.states.get("sensor.integration")
assert (
round(float(state.state), config["sensor"]["round"]) == 3.0
if method == "right"
else 1.0
)
now += timedelta(seconds=3600)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(
entity_id,
2,
{
"device_class": SensorDeviceClass.POWER,
ATTR_UNIT_OF_MEASUREMENT: UnitOfPower.KILO_WATT,
},
force_update=True,
)
await hass.async_block_till_done()
state = hass.states.get("sensor.integration")
assert (
round(float(state.state), config["sensor"]["round"]) == 5.0
if method == "right"
else 3.0
)
async def test_restore_state(hass: HomeAssistant) -> None:
"""Test integration sensor state is restored correctly."""
@ -416,13 +476,15 @@ async def test_units(hass: HomeAssistant) -> None:
assert new_state.state == STATE_UNAVAILABLE
async def test_device_class(hass: HomeAssistant) -> None:
@pytest.mark.parametrize("method", ["trapezoidal", "left", "right"])
async def test_device_class(hass: HomeAssistant, method) -> None:
"""Test integration sensor units using a power source."""
config = {
"sensor": {
"platform": "integration",
"name": "integration",
"source": "sensor.power",
"method": method,
}
}
@ -465,13 +527,15 @@ async def test_device_class(hass: HomeAssistant) -> None:
assert state.attributes.get("device_class") == SensorDeviceClass.ENERGY
async def test_calc_errors(hass: HomeAssistant) -> None:
@pytest.mark.parametrize("method", ["trapezoidal", "left", "right"])
async def test_calc_errors(hass: HomeAssistant, method) -> None:
"""Test integration sensor units using a power source."""
config = {
"sensor": {
"platform": "integration",
"name": "integration",
"source": "sensor.power",
"method": method,
}
}
@ -479,6 +543,7 @@ async def test_calc_errors(hass: HomeAssistant) -> None:
entity_id = config["sensor"]["source"]
now = dt_util.utcnow()
hass.states.async_set(entity_id, None, {})
await hass.async_block_till_done()
@ -489,19 +554,25 @@ async def test_calc_errors(hass: HomeAssistant) -> None:
assert state.state == STATE_UNKNOWN
# Moving from an unknown state to a value is a calc error and should
# not change the value of the Riemann sensor.
hass.states.async_set(entity_id, 0, {"device_class": None})
# not change the value of the Riemann sensor, unless the method used is "right".
now += timedelta(seconds=3600)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(entity_id, 0, {"device_class": None})
await hass.async_block_till_done()
await hass.async_block_till_done()
state = hass.states.get("sensor.integration")
assert state is not None
assert state.state == STATE_UNKNOWN
assert state.state == STATE_UNKNOWN if method != "right" else "0.000"
# With the source sensor updated successfully, the Riemann sensor
# should have a zero (known) value.
hass.states.async_set(entity_id, 1, {"device_class": None})
now += timedelta(seconds=3600)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(entity_id, 1, {"device_class": None})
await hass.async_block_till_done()
await hass.async_block_till_done()
state = hass.states.get("sensor.integration")
assert state is not None
assert round(float(state.state)) == 0
assert round(float(state.state)) == 0 if method != "right" else 1
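
The parametrized tests above expect different readings per integration method because the left, right, and trapezoidal Riemann sums weight the interval endpoints differently. A tiny worked example, separate from the tests: over one hour with power going from 0 kW to 2 kW, the step contributes 0 kWh (left), 2 kWh (right), or 1 kWh (trapezoidal).

def riemann_step(left_kw: float, right_kw: float, hours: float, method: str) -> float:
    """Integrate one interval of a power series into energy in kWh (sketch)."""
    if method == "left":
        return left_kw * hours
    if method == "right":
        return right_kw * hours
    # Trapezoidal: average the two endpoints.
    return (left_kw + right_kw) / 2 * hours


for method in ("left", "right", "trapezoidal"):
    print(method, riemann_step(0.0, 2.0, 1.0, method), "kWh")
# left 0.0 kWh, right 2.0 kWh, trapezoidal 1.0 kWh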

View File

@ -6,7 +6,7 @@
"attributes": {
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -202,7 +202,7 @@
],
"1/29/0": [
{
"type": 268,
"deviceType": 268,
"revision": 1
}
],

View File

@ -6,7 +6,7 @@
"attributes": {
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -61,7 +61,7 @@
"1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"1/29/0": [
{
"type": 21,
"deviceType": 21,
"revision": 1
}
],

View File

@ -12,7 +12,7 @@
"0/4/65531": [0, 65528, 65529, 65531, 65532, 65533],
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -414,7 +414,7 @@
],
"1/29/0": [
{
"type": 257,
"deviceType": 257,
"revision": 1
}
],

View File

@ -12,7 +12,7 @@
"0/4/65531": [0, 65528, 65529, 65531, 65532, 65533],
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -354,7 +354,7 @@
],
"1/29/0": [
{
"type": 257,
"deviceType": 257,
"revision": 1
}
],

View File

@ -7,7 +7,7 @@
"attributes": {
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -443,7 +443,7 @@
],
"1/29/0": [
{
"type": 10,
"deviceType": 10,
"revision": 1
}
],

View File

@ -6,7 +6,7 @@
"attributes": {
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -202,7 +202,7 @@
],
"1/29/0": [
{
"type": 269,
"deviceType": 269,
"revision": 1
}
],

View File

@ -6,7 +6,7 @@
"attributes": {
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -56,7 +56,7 @@
"1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"1/29/0": [
{
"type": 774,
"deviceType": 774,
"revision": 1
}
],

View File

@ -6,7 +6,7 @@
"attributes": {
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -56,7 +56,7 @@
"1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"1/29/0": [
{
"type": 775,
"deviceType": 775,
"revision": 1
}
],

View File

@ -6,7 +6,7 @@
"attributes": {
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -56,7 +56,7 @@
"1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"1/29/0": [
{
"type": 262,
"deviceType": 262,
"revision": 1
}
],

View File

@ -6,7 +6,7 @@
"attributes": {
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -61,7 +61,7 @@
"1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"1/29/0": [
{
"type": 263,
"deviceType": 263,
"revision": 1
}
],

View File

@ -6,7 +6,7 @@
"attributes": {
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -118,7 +118,7 @@
],
"1/29/0": [
{
"type": 266,
"deviceType": 266,
"revision": 1
}
],

View File

@ -12,7 +12,7 @@
"0/4/65531": [0, 65528, 65529, 65531, 65532, 65533],
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -354,7 +354,7 @@
],
"1/29/0": [
{
"type": 257,
"deviceType": 257,
"revision": 1
}
],

View File

@ -6,7 +6,7 @@
"attributes": {
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -56,7 +56,7 @@
"1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"1/29/0": [
{
"type": 773,
"deviceType": 773,
"revision": 1
}
],

View File

@ -6,7 +6,7 @@
"attributes": {
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -61,7 +61,7 @@
"1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"1/29/0": [
{
"type": 770,
"deviceType": 770,
"revision": 1
}
],

View File

@ -7,7 +7,7 @@
"attributes": {
"0/29/0": [
{
"type": 22,
"deviceType": 22,
"revision": 1
}
],
@ -281,7 +281,7 @@
"1/4/65531": [0, 65528, 65529, 65531, 65532, 65533],
"1/29/0": [
{
"type": 514,
"deviceType": 514,
"revision": 1
}
],

View File

@ -188,7 +188,7 @@ async def test_color_temperature_light(
"turn_on",
{
"entity_id": entity_id,
"color_temp": 3000,
"color_temp": 300,
},
blocking=True,
)
@ -200,7 +200,7 @@ async def test_color_temperature_light(
node_id=light_node.node_id,
endpoint_id=1,
command=clusters.ColorControl.Commands.MoveToColorTemperature(
colorTemperature=3003,
colorTemperatureMireds=300,
transitionTime=0,
),
),

View File

@ -37,18 +37,20 @@ async def test_entry_diagnostics(
"hardware_id": REDACTED,
"hardware_revision": 4,
"firmware_version": {
"silabs": "1.1.2",
"wifi": "0.121.0",
"wifi_app": "3.3.0",
"silabs": "1.1.2",
"ti": None,
},
"missing_at": None,
"created_at": "2019-06-27T00:18:44.337000+00:00",
"updated_at": "2023-03-19T03:20:16.061000+00:00",
"system_id": 11111,
"firmware": {
"silabs": "1.1.2",
"wifi": "0.121.0",
"wifi_app": "3.3.0",
"silabs": "1.1.2",
"ti": None,
},
"links": {"system": 11111},
},
@ -59,18 +61,20 @@ async def test_entry_diagnostics(
"hardware_id": REDACTED,
"hardware_revision": 4,
"firmware_version": {
"silabs": "1.1.2",
"wifi": "0.121.0",
"wifi_app": "3.3.0",
"silabs": "1.1.2",
"ti": None,
},
"missing_at": None,
"created_at": "2019-04-30T01:43:50.497000+00:00",
"updated_at": "2023-01-02T19:09:58.251000+00:00",
"system_id": 11111,
"firmware": {
"silabs": "1.1.2",
"wifi": "0.121.0",
"wifi_app": "3.3.0",
"silabs": "1.1.2",
"ti": None,
},
"links": {"system": 11111},
},

View File

@ -70,10 +70,13 @@ async def test_imperial_metric(
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
async def test_none_values(hass: HomeAssistant, mock_simple_nws, no_weather) -> None:
@pytest.mark.parametrize("values", [NONE_OBSERVATION, None])
async def test_none_values(
hass: HomeAssistant, mock_simple_nws, no_weather, values
) -> None:
"""Test with no values."""
instance = mock_simple_nws.return_value
instance.observation = NONE_OBSERVATION
instance.observation = values
registry = er.async_get(hass)

View File

@ -53,6 +53,7 @@ async def test_ssdp_flow_dispatched_on_st(
"usn": "uuid:mock-udn::mock-st",
"server": "mock-server",
"ext": "",
"_source": "search",
}
)
ssdp_listener = await init_ssdp_component(hass)
@ -96,6 +97,7 @@ async def test_ssdp_flow_dispatched_on_manufacturer_url(
"usn": "uuid:mock-udn::mock-st",
"server": "mock-server",
"ext": "",
"_source": "search",
}
)
ssdp_listener = await init_ssdp_component(hass)
@ -149,6 +151,7 @@ async def test_scan_match_upnp_devicedesc_manufacturer(
"st": "mock-st",
"location": "http://1.1.1.1",
"usn": "uuid:mock-udn::mock-st",
"_source": "search",
}
)
ssdp_listener = await init_ssdp_component(hass)
@ -193,6 +196,7 @@ async def test_scan_match_upnp_devicedesc_devicetype(
"st": "mock-st",
"location": "http://1.1.1.1",
"usn": "uuid:mock-udn::mock-st",
"_source": "search",
}
)
ssdp_listener = await init_ssdp_component(hass)
@ -290,6 +294,7 @@ async def test_scan_not_all_match(
"st": "mock-st",
"location": "http://1.1.1.1",
"usn": "uuid:mock-udn::mock-st",
"_source": "search",
}
)
ssdp_listener = await init_ssdp_component(hass)
@ -333,6 +338,7 @@ async def test_flow_start_only_alive(
"st": "mock-st",
"location": "http://1.1.1.1",
"usn": "uuid:mock-udn::mock-st",
"_source": "search",
}
)
ssdp_listener._on_search(mock_ssdp_search_response)
@ -350,6 +356,7 @@ async def test_flow_start_only_alive(
"usn": "uuid:mock-udn::mock-st",
"nt": "upnp:rootdevice",
"nts": "ssdp:alive",
"_source": "advertisement",
}
)
ssdp_listener._on_alive(mock_ssdp_advertisement)
@ -407,6 +414,7 @@ async def test_discovery_from_advertisement_sets_ssdp_st(
"nts": "ssdp:alive",
"location": "http://1.1.1.1",
"usn": "uuid:mock-udn::mock-st",
"_source": "advertisement",
}
)
ssdp_listener._on_alive(mock_ssdp_advertisement)
@ -481,6 +489,7 @@ async def test_scan_with_registered_callback(
"server": "mock-server",
"x-rincon-bootseq": "55",
"ext": "",
"_source": "search",
}
)
ssdp_listener = await init_ssdp_component(hass)
@ -577,6 +586,7 @@ async def test_getting_existing_headers(
"USN": "uuid:TIVRTLSR7ANF-D6E-1557809135086-RETAIL::urn:mdx-netflix-com:service:target:3",
"SERVER": "mock-server",
"EXT": "",
"_source": "search",
}
)
ssdp_listener = await init_ssdp_component(hass)
@ -818,6 +828,7 @@ async def test_flow_dismiss_on_byebye(
"st": "mock-st",
"location": "http://1.1.1.1",
"usn": "uuid:mock-udn::mock-st",
"_source": "search",
}
)
ssdp_listener._on_search(mock_ssdp_search_response)
@ -835,6 +846,7 @@ async def test_flow_dismiss_on_byebye(
"usn": "uuid:mock-udn::mock-st",
"nt": "upnp:rootdevice",
"nts": "ssdp:alive",
"_source": "advertisement",
}
)
ssdp_listener._on_alive(mock_ssdp_advertisement)