2023.11.3 (#104348)
commit ef89d1cd3d
@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
   "quality_scale": "platinum",
-  "requirements": ["accuweather==2.0.0"]
+  "requirements": ["accuweather==2.1.0"]
 }
@@ -857,16 +857,18 @@ class AlexaInputController(AlexaCapability):

     def inputs(self) -> list[dict[str, str]] | None:
         """Return the list of valid supported inputs."""
-        source_list: list[str] = self.entity.attributes.get(
+        source_list: list[Any] = self.entity.attributes.get(
             media_player.ATTR_INPUT_SOURCE_LIST, []
         )
         return AlexaInputController.get_valid_inputs(source_list)

     @staticmethod
-    def get_valid_inputs(source_list: list[str]) -> list[dict[str, str]]:
+    def get_valid_inputs(source_list: list[Any]) -> list[dict[str, str]]:
        """Return list of supported inputs."""
         input_list: list[dict[str, str]] = []
         for source in source_list:
+            if not isinstance(source, str):
+                continue
             formatted_source = (
                 source.lower().replace("-", "").replace("_", "").replace(" ", "")
             )
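The change above relaxes the typing of the media player's source list to list[Any] and skips entries that are not strings, so a single malformed source_list attribute no longer breaks Alexa input discovery. A minimal standalone sketch of the same filtering, using a hypothetical VALID_SOURCE_NAME_MAP in place of the integration's real input catalog:

from typing import Any

# Hypothetical stand-in for the integration's catalog of Alexa-recognised inputs.
VALID_SOURCE_NAME_MAP = {"hdmi1": "HDMI 1", "tv": "TV", "aux": "AUX"}

def get_valid_inputs(source_list: list[Any]) -> list[dict[str, str]]:
    """Keep only string sources and normalise them before matching."""
    input_list: list[dict[str, str]] = []
    for source in source_list:
        if not isinstance(source, str):
            # Integers, None or dicts are ignored instead of raising on .lower().
            continue
        formatted_source = source.lower().replace("-", "").replace("_", "").replace(" ", "")
        if formatted_source in VALID_SOURCE_NAME_MAP:
            input_list.append({"name": VALID_SOURCE_NAME_MAP[formatted_source]})
    return input_list

# A mixed-type attribute no longer raises AttributeError:
print(get_valid_inputs(["HDMI 1", 2, None, "aux"]))  # [{'name': 'HDMI 1'}, {'name': 'AUX'}]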
@ -124,6 +124,7 @@ class BluetoothManager:
|
||||
"storage",
|
||||
"slot_manager",
|
||||
"_debug",
|
||||
"shutdown",
|
||||
)
|
||||
|
||||
def __init__(
|
||||
@ -165,6 +166,7 @@ class BluetoothManager:
|
||||
self.storage = storage
|
||||
self.slot_manager = slot_manager
|
||||
self._debug = _LOGGER.isEnabledFor(logging.DEBUG)
|
||||
self.shutdown = False
|
||||
|
||||
@property
|
||||
def supports_passive_scan(self) -> bool:
|
||||
@ -259,6 +261,7 @@ class BluetoothManager:
|
||||
def async_stop(self, event: Event) -> None:
|
||||
"""Stop the Bluetooth integration at shutdown."""
|
||||
_LOGGER.debug("Stopping bluetooth manager")
|
||||
self.shutdown = True
|
||||
if self._cancel_unavailable_tracking:
|
||||
self._cancel_unavailable_tracking()
|
||||
self._cancel_unavailable_tracking = None
|
||||
|
@ -270,6 +270,10 @@ class HaBleakClientWrapper(BleakClient):
|
||||
"""Connect to the specified GATT server."""
|
||||
assert models.MANAGER is not None
|
||||
manager = models.MANAGER
|
||||
if manager.shutdown:
|
||||
raise BleakError("Bluetooth is already shutdown")
|
||||
if debug_logging := _LOGGER.isEnabledFor(logging.DEBUG):
|
||||
_LOGGER.debug("%s: Looking for backend to connect", self.__address)
|
||||
wrapped_backend = self._async_get_best_available_backend_and_device(manager)
|
||||
device = wrapped_backend.device
|
||||
scanner = wrapped_backend.scanner
|
||||
@ -281,12 +285,14 @@ class HaBleakClientWrapper(BleakClient):
|
||||
timeout=self.__timeout,
|
||||
hass=manager.hass,
|
||||
)
|
||||
if debug_logging := _LOGGER.isEnabledFor(logging.DEBUG):
|
||||
if debug_logging:
|
||||
# Only lookup the description if we are going to log it
|
||||
description = ble_device_description(device)
|
||||
_, adv = scanner.discovered_devices_and_advertisement_data[device.address]
|
||||
rssi = adv.rssi
|
||||
_LOGGER.debug("%s: Connecting (last rssi: %s)", description, rssi)
|
||||
_LOGGER.debug(
|
||||
"%s: Connecting via %s (last rssi: %s)", description, scanner.name, rssi
|
||||
)
|
||||
connected = None
|
||||
try:
|
||||
connected = await super().connect(**kwargs)
|
||||
@ -301,7 +307,9 @@ class HaBleakClientWrapper(BleakClient):
|
||||
manager.async_release_connection_slot(device)
|
||||
|
||||
if debug_logging:
|
||||
_LOGGER.debug("%s: Connected (last rssi: %s)", description, rssi)
|
||||
_LOGGER.debug(
|
||||
"%s: Connected via %s (last rssi: %s)", description, scanner.name, rssi
|
||||
)
|
||||
return connected
|
||||
|
||||
@hass_callback
|
||||
|
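The bluetooth changes above add a shutdown flag to the manager and make HaBleakClientWrapper.connect raise a BleakError before it starts looking for a backend once shutdown has begun. A rough sketch of that guard pattern with simplified stand-in classes (not the real Home Assistant types):

from bleak.exc import BleakError

class Manager:
    """Stand-in for the bluetooth manager; only the shutdown flag matters here."""

    def __init__(self) -> None:
        self.shutdown = False

    def async_stop(self) -> None:
        """Called at Home Assistant shutdown."""
        self.shutdown = True

class ClientWrapper:
    """Stand-in showing the early-exit check added to connect()."""

    def __init__(self, manager: Manager) -> None:
        self._manager = manager

    async def connect(self) -> bool:
        if self._manager.shutdown:
            # Fail fast instead of scanning for a backend during shutdown.
            raise BleakError("Bluetooth is already shutdown")
        # ... the real code would pick the best available backend and connect here ...
        return True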
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive",
   "iot_class": "cloud_polling",
   "loggers": ["bimmer_connected"],
-  "requirements": ["bimmer-connected==0.14.2"]
+  "requirements": ["bimmer-connected==0.14.3"]
 }
@@ -7,7 +7,7 @@
   "documentation": "https://www.home-assistant.io/integrations/bosch_shc",
   "iot_class": "local_push",
   "loggers": ["boschshcpy"],
-  "requirements": ["boschshcpy==0.2.57"],
+  "requirements": ["boschshcpy==0.2.75"],
   "zeroconf": [
     {
       "type": "_http._tcp.local.",
@ -24,7 +24,7 @@
|
||||
"location": {
|
||||
"name": "Location"
|
||||
},
|
||||
"messages": {
|
||||
"message": {
|
||||
"name": "Message"
|
||||
},
|
||||
"start_time": {
|
||||
|
@@ -14,6 +14,6 @@
   "documentation": "https://www.home-assistant.io/integrations/cast",
   "iot_class": "local_polling",
   "loggers": ["casttube", "pychromecast"],
-  "requirements": ["PyChromecast==13.0.7"],
+  "requirements": ["PyChromecast==13.0.8"],
   "zeroconf": ["_googlecast._tcp.local."]
 }
@ -17,6 +17,7 @@ import homeassistant.helpers.config_validation as cv
|
||||
from . import get_accounts
|
||||
from .const import (
|
||||
API_ACCOUNT_CURRENCY,
|
||||
API_ACCOUNT_CURRENCY_CODE,
|
||||
API_RATES,
|
||||
API_RESOURCE_TYPE,
|
||||
API_TYPE_VAULT,
|
||||
@ -81,7 +82,7 @@ async def validate_options(
|
||||
accounts = await hass.async_add_executor_job(get_accounts, client)
|
||||
|
||||
accounts_currencies = [
|
||||
account[API_ACCOUNT_CURRENCY]
|
||||
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]
|
||||
for account in accounts
|
||||
if account[API_RESOURCE_TYPE] != API_TYPE_VAULT
|
||||
]
|
||||
|
@@ -12,14 +12,16 @@ DOMAIN = "coinbase"
 API_ACCOUNT_AMOUNT = "amount"
 API_ACCOUNT_BALANCE = "balance"
 API_ACCOUNT_CURRENCY = "currency"
+API_ACCOUNT_CURRENCY_CODE = "code"
 API_ACCOUNT_ID = "id"
-API_ACCOUNT_NATIVE_BALANCE = "native_balance"
+API_ACCOUNT_NATIVE_BALANCE = "balance"
 API_ACCOUNT_NAME = "name"
 API_ACCOUNTS_DATA = "data"
 API_RATES = "rates"
 API_RESOURCE_PATH = "resource_path"
 API_RESOURCE_TYPE = "type"
 API_TYPE_VAULT = "vault"
+API_USD = "USD"

 WALLETS = {
     "1INCH": "1INCH",
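In the newer Coinbase account payloads the currency field is a nested object rather than a bare code string, which is why the config-flow and sensor hunks around this change replace account[API_ACCOUNT_CURRENCY] with account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]. A small sketch with made-up sample data showing the shape difference:

API_ACCOUNT_CURRENCY = "currency"
API_ACCOUNT_CURRENCY_CODE = "code"
API_RESOURCE_TYPE = "type"
API_TYPE_VAULT = "vault"

# Hypothetical sample of the newer response shape: currency is a dict, not a string.
accounts = [
    {"currency": {"code": "BTC", "name": "Bitcoin"}, "type": "wallet"},
    {"currency": {"code": "USD", "name": "US Dollar"}, "type": "vault"},
]

provided_currencies = [
    account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]
    for account in accounts
    if account[API_RESOURCE_TYPE] != API_TYPE_VAULT
]
print(provided_currencies)  # ['BTC'] -- vault accounts are still skipped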
@ -14,9 +14,9 @@ from .const import (
|
||||
API_ACCOUNT_AMOUNT,
|
||||
API_ACCOUNT_BALANCE,
|
||||
API_ACCOUNT_CURRENCY,
|
||||
API_ACCOUNT_CURRENCY_CODE,
|
||||
API_ACCOUNT_ID,
|
||||
API_ACCOUNT_NAME,
|
||||
API_ACCOUNT_NATIVE_BALANCE,
|
||||
API_RATES,
|
||||
API_RESOURCE_TYPE,
|
||||
API_TYPE_VAULT,
|
||||
@ -55,7 +55,7 @@ async def async_setup_entry(
|
||||
entities: list[SensorEntity] = []
|
||||
|
||||
provided_currencies: list[str] = [
|
||||
account[API_ACCOUNT_CURRENCY]
|
||||
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]
|
||||
for account in instance.accounts
|
||||
if account[API_RESOURCE_TYPE] != API_TYPE_VAULT
|
||||
]
|
||||
@ -106,26 +106,28 @@ class AccountSensor(SensorEntity):
|
||||
self._currency = currency
|
||||
for account in coinbase_data.accounts:
|
||||
if (
|
||||
account[API_ACCOUNT_CURRENCY] != currency
|
||||
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE] != currency
|
||||
or account[API_RESOURCE_TYPE] == API_TYPE_VAULT
|
||||
):
|
||||
continue
|
||||
self._attr_name = f"Coinbase {account[API_ACCOUNT_NAME]}"
|
||||
self._attr_unique_id = (
|
||||
f"coinbase-{account[API_ACCOUNT_ID]}-wallet-"
|
||||
f"{account[API_ACCOUNT_CURRENCY]}"
|
||||
f"{account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]}"
|
||||
)
|
||||
self._attr_native_value = account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT]
|
||||
self._attr_native_unit_of_measurement = account[API_ACCOUNT_CURRENCY]
|
||||
self._attr_native_unit_of_measurement = account[API_ACCOUNT_CURRENCY][
|
||||
API_ACCOUNT_CURRENCY_CODE
|
||||
]
|
||||
self._attr_icon = CURRENCY_ICONS.get(
|
||||
account[API_ACCOUNT_CURRENCY], DEFAULT_COIN_ICON
|
||||
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE],
|
||||
DEFAULT_COIN_ICON,
|
||||
)
|
||||
self._native_balance = round(
|
||||
float(account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT])
|
||||
/ float(coinbase_data.exchange_rates[API_RATES][currency]),
|
||||
2,
|
||||
)
|
||||
self._native_balance = account[API_ACCOUNT_NATIVE_BALANCE][
|
||||
API_ACCOUNT_AMOUNT
|
||||
]
|
||||
self._native_currency = account[API_ACCOUNT_NATIVE_BALANCE][
|
||||
API_ACCOUNT_CURRENCY
|
||||
]
|
||||
break
|
||||
|
||||
self._attr_state_class = SensorStateClass.TOTAL
|
||||
@ -141,7 +143,7 @@ class AccountSensor(SensorEntity):
|
||||
def extra_state_attributes(self) -> dict[str, str]:
|
||||
"""Return the state attributes of the sensor."""
|
||||
return {
|
||||
ATTR_NATIVE_BALANCE: f"{self._native_balance} {self._native_currency}",
|
||||
ATTR_NATIVE_BALANCE: f"{self._native_balance} {self._coinbase_data.exchange_base}",
|
||||
}
|
||||
|
||||
def update(self) -> None:
|
||||
@ -149,17 +151,17 @@ class AccountSensor(SensorEntity):
|
||||
self._coinbase_data.update()
|
||||
for account in self._coinbase_data.accounts:
|
||||
if (
|
||||
account[API_ACCOUNT_CURRENCY] != self._currency
|
||||
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]
|
||||
!= self._currency
|
||||
or account[API_RESOURCE_TYPE] == API_TYPE_VAULT
|
||||
):
|
||||
continue
|
||||
self._attr_native_value = account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT]
|
||||
self._native_balance = account[API_ACCOUNT_NATIVE_BALANCE][
|
||||
API_ACCOUNT_AMOUNT
|
||||
]
|
||||
self._native_currency = account[API_ACCOUNT_NATIVE_BALANCE][
|
||||
API_ACCOUNT_CURRENCY
|
||||
]
|
||||
self._native_balance = round(
|
||||
float(account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT])
|
||||
/ float(self._coinbase_data.exchange_rates[API_RATES][self._currency]),
|
||||
2,
|
||||
)
|
||||
break
|
||||
|
||||
|
||||
|
@@ -109,7 +109,7 @@ class ComelitCoverEntity(
         if not self.is_closing and not self.is_opening:
             return

-        action = STATE_OFF if self.is_closing else STATE_ON
+        action = STATE_ON if self.is_closing else STATE_OFF
         await self._api.set_device_status(COVER, self._device.index, action)

     @callback
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/comelit",
   "iot_class": "local_polling",
   "loggers": ["aiocomelit"],
-  "requirements": ["aiocomelit==0.3.0"]
+  "requirements": ["aiocomelit==0.5.2"]
 }
@ -25,9 +25,9 @@ see:
|
||||
gps_accuracy:
|
||||
selector:
|
||||
number:
|
||||
min: 1
|
||||
max: 100
|
||||
unit_of_measurement: "%"
|
||||
min: 0
|
||||
mode: box
|
||||
unit_of_measurement: "m"
|
||||
battery:
|
||||
selector:
|
||||
number:
|
||||
|
@ -676,19 +676,20 @@ def get_entity_state_dict(config: Config, entity: State) -> dict[str, Any]:
|
||||
@lru_cache(maxsize=512)
|
||||
def _build_entity_state_dict(entity: State) -> dict[str, Any]:
|
||||
"""Build a state dict for an entity."""
|
||||
is_on = entity.state != STATE_OFF
|
||||
data: dict[str, Any] = {
|
||||
STATE_ON: entity.state != STATE_OFF,
|
||||
STATE_ON: is_on,
|
||||
STATE_BRIGHTNESS: None,
|
||||
STATE_HUE: None,
|
||||
STATE_SATURATION: None,
|
||||
STATE_COLOR_TEMP: None,
|
||||
}
|
||||
if data[STATE_ON]:
|
||||
attributes = entity.attributes
|
||||
if is_on:
|
||||
data[STATE_BRIGHTNESS] = hass_to_hue_brightness(
|
||||
entity.attributes.get(ATTR_BRIGHTNESS, 0)
|
||||
attributes.get(ATTR_BRIGHTNESS) or 0
|
||||
)
|
||||
hue_sat = entity.attributes.get(ATTR_HS_COLOR)
|
||||
if hue_sat is not None:
|
||||
if (hue_sat := attributes.get(ATTR_HS_COLOR)) is not None:
|
||||
hue = hue_sat[0]
|
||||
sat = hue_sat[1]
|
||||
# Convert hass hs values back to hue hs values
|
||||
@ -697,7 +698,7 @@ def _build_entity_state_dict(entity: State) -> dict[str, Any]:
|
||||
else:
|
||||
data[STATE_HUE] = HUE_API_STATE_HUE_MIN
|
||||
data[STATE_SATURATION] = HUE_API_STATE_SAT_MIN
|
||||
data[STATE_COLOR_TEMP] = entity.attributes.get(ATTR_COLOR_TEMP, 0)
|
||||
data[STATE_COLOR_TEMP] = attributes.get(ATTR_COLOR_TEMP) or 0
|
||||
|
||||
else:
|
||||
data[STATE_BRIGHTNESS] = 0
|
||||
@ -706,25 +707,23 @@ def _build_entity_state_dict(entity: State) -> dict[str, Any]:
|
||||
data[STATE_COLOR_TEMP] = 0
|
||||
|
||||
if entity.domain == climate.DOMAIN:
|
||||
temperature = entity.attributes.get(ATTR_TEMPERATURE, 0)
|
||||
temperature = attributes.get(ATTR_TEMPERATURE, 0)
|
||||
# Convert 0-100 to 0-254
|
||||
data[STATE_BRIGHTNESS] = round(temperature * HUE_API_STATE_BRI_MAX / 100)
|
||||
elif entity.domain == humidifier.DOMAIN:
|
||||
humidity = entity.attributes.get(ATTR_HUMIDITY, 0)
|
||||
humidity = attributes.get(ATTR_HUMIDITY, 0)
|
||||
# Convert 0-100 to 0-254
|
||||
data[STATE_BRIGHTNESS] = round(humidity * HUE_API_STATE_BRI_MAX / 100)
|
||||
elif entity.domain == media_player.DOMAIN:
|
||||
level = entity.attributes.get(
|
||||
ATTR_MEDIA_VOLUME_LEVEL, 1.0 if data[STATE_ON] else 0.0
|
||||
)
|
||||
level = attributes.get(ATTR_MEDIA_VOLUME_LEVEL, 1.0 if is_on else 0.0)
|
||||
# Convert 0.0-1.0 to 0-254
|
||||
data[STATE_BRIGHTNESS] = round(min(1.0, level) * HUE_API_STATE_BRI_MAX)
|
||||
elif entity.domain == fan.DOMAIN:
|
||||
percentage = entity.attributes.get(ATTR_PERCENTAGE) or 0
|
||||
percentage = attributes.get(ATTR_PERCENTAGE) or 0
|
||||
# Convert 0-100 to 0-254
|
||||
data[STATE_BRIGHTNESS] = round(percentage * HUE_API_STATE_BRI_MAX / 100)
|
||||
elif entity.domain == cover.DOMAIN:
|
||||
level = entity.attributes.get(ATTR_CURRENT_POSITION, 0)
|
||||
level = attributes.get(ATTR_CURRENT_POSITION, 0)
|
||||
data[STATE_BRIGHTNESS] = round(level / 100 * HUE_API_STATE_BRI_MAX)
|
||||
_clamp_values(data)
|
||||
return data
|
||||
|
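Several lookups in the emulated_hue hunk above switch from attributes.get(KEY, 0) to attributes.get(KEY) or 0. The default argument only applies when the key is missing entirely, so an attribute that is present but set to None would previously leak into the brightness math. A tiny illustration:

attributes = {"brightness": None}

value_with_default = attributes.get("brightness", 0)  # None -- default not used, later math fails
value_with_or = attributes.get("brightness") or 0     # 0    -- safe to scale into 0-254

print(value_with_default, value_with_or)  # None 0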
@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/enphase_envoy",
   "iot_class": "local_polling",
   "loggers": ["pyenphase"],
-  "requirements": ["pyenphase==1.14.2"],
+  "requirements": ["pyenphase==1.14.3"],
   "zeroconf": [
     {
       "type": "_enphase-envoy._tcp.local."
@ -75,15 +75,13 @@ def verify_connected(func: _WrapFuncType) -> _WrapFuncType:
|
||||
self: ESPHomeClient, *args: Any, **kwargs: Any
|
||||
) -> Any:
|
||||
# pylint: disable=protected-access
|
||||
if not self._is_connected:
|
||||
raise BleakError(f"{self._description} is not connected")
|
||||
loop = self._loop
|
||||
disconnected_futures = self._disconnected_futures
|
||||
disconnected_future = loop.create_future()
|
||||
disconnected_futures.add(disconnected_future)
|
||||
ble_device = self._ble_device
|
||||
disconnect_message = (
|
||||
f"{self._source_name }: {ble_device.name} - {ble_device.address}: "
|
||||
"Disconnected during operation"
|
||||
)
|
||||
disconnect_message = f"{self._description}: Disconnected during operation"
|
||||
try:
|
||||
async with interrupt(disconnected_future, BleakError, disconnect_message):
|
||||
return await func(self, *args, **kwargs)
|
||||
@ -115,10 +113,8 @@ def api_error_as_bleak_error(func: _WrapFuncType) -> _WrapFuncType:
|
||||
if ex.error.error == -1:
|
||||
# pylint: disable=protected-access
|
||||
_LOGGER.debug(
|
||||
"%s: %s - %s: BLE device disconnected during %s operation",
|
||||
self._source_name,
|
||||
self._ble_device.name,
|
||||
self._ble_device.address,
|
||||
"%s: BLE device disconnected during %s operation",
|
||||
self._description,
|
||||
func.__name__,
|
||||
)
|
||||
self._async_ble_device_disconnected()
|
||||
@ -140,7 +136,7 @@ class ESPHomeClientData:
|
||||
api_version: APIVersion
|
||||
title: str
|
||||
scanner: ESPHomeScanner | None
|
||||
disconnect_callbacks: list[Callable[[], None]] = field(default_factory=list)
|
||||
disconnect_callbacks: set[Callable[[], None]] = field(default_factory=set)
|
||||
|
||||
|
||||
class ESPHomeClient(BaseBleakClient):
|
||||
@ -159,10 +155,11 @@ class ESPHomeClient(BaseBleakClient):
|
||||
assert isinstance(address_or_ble_device, BLEDevice)
|
||||
super().__init__(address_or_ble_device, *args, **kwargs)
|
||||
self._loop = asyncio.get_running_loop()
|
||||
self._ble_device = address_or_ble_device
|
||||
self._address_as_int = mac_to_int(self._ble_device.address)
|
||||
assert self._ble_device.details is not None
|
||||
self._source = self._ble_device.details["source"]
|
||||
ble_device = address_or_ble_device
|
||||
self._ble_device = ble_device
|
||||
self._address_as_int = mac_to_int(ble_device.address)
|
||||
assert ble_device.details is not None
|
||||
self._source = ble_device.details["source"]
|
||||
self._cache = client_data.cache
|
||||
self._bluetooth_device = client_data.bluetooth_device
|
||||
self._client = client_data.client
|
||||
@ -177,8 +174,11 @@ class ESPHomeClient(BaseBleakClient):
|
||||
self._feature_flags = device_info.bluetooth_proxy_feature_flags_compat(
|
||||
client_data.api_version
|
||||
)
|
||||
self._address_type = address_or_ble_device.details["address_type"]
|
||||
self._address_type = ble_device.details["address_type"]
|
||||
self._source_name = f"{client_data.title} [{self._source}]"
|
||||
self._description = (
|
||||
f"{self._source_name}: {ble_device.name} - {ble_device.address}"
|
||||
)
|
||||
scanner = client_data.scanner
|
||||
assert scanner is not None
|
||||
self._scanner = scanner
|
||||
@ -196,12 +196,10 @@ class ESPHomeClient(BaseBleakClient):
|
||||
except (AssertionError, ValueError) as ex:
|
||||
_LOGGER.debug(
|
||||
(
|
||||
"%s: %s - %s: Failed to unsubscribe from connection state (likely"
|
||||
"%s: Failed to unsubscribe from connection state (likely"
|
||||
" connection dropped): %s"
|
||||
),
|
||||
self._source_name,
|
||||
self._ble_device.name,
|
||||
self._ble_device.address,
|
||||
self._description,
|
||||
ex,
|
||||
)
|
||||
self._cancel_connection_state = None
|
||||
@ -217,6 +215,7 @@ class ESPHomeClient(BaseBleakClient):
|
||||
if not future.done():
|
||||
future.set_result(None)
|
||||
self._disconnected_futures.clear()
|
||||
self._disconnect_callbacks.discard(self._async_esp_disconnected)
|
||||
self._unsubscribe_connection_state()
|
||||
|
||||
def _async_ble_device_disconnected(self) -> None:
|
||||
@ -224,23 +223,15 @@ class ESPHomeClient(BaseBleakClient):
|
||||
was_connected = self._is_connected
|
||||
self._async_disconnected_cleanup()
|
||||
if was_connected:
|
||||
_LOGGER.debug(
|
||||
"%s: %s - %s: BLE device disconnected",
|
||||
self._source_name,
|
||||
self._ble_device.name,
|
||||
self._ble_device.address,
|
||||
)
|
||||
_LOGGER.debug("%s: BLE device disconnected", self._description)
|
||||
self._async_call_bleak_disconnected_callback()
|
||||
|
||||
def _async_esp_disconnected(self) -> None:
|
||||
"""Handle the esp32 client disconnecting from us."""
|
||||
_LOGGER.debug(
|
||||
"%s: %s - %s: ESP device disconnected",
|
||||
self._source_name,
|
||||
self._ble_device.name,
|
||||
self._ble_device.address,
|
||||
)
|
||||
self._disconnect_callbacks.remove(self._async_esp_disconnected)
|
||||
_LOGGER.debug("%s: ESP device disconnected", self._description)
|
||||
# Calling _async_ble_device_disconnected calls
|
||||
# _async_disconnected_cleanup which will also remove
|
||||
# the disconnect callbacks
|
||||
self._async_ble_device_disconnected()
|
||||
|
||||
def _async_call_bleak_disconnected_callback(self) -> None:
|
||||
@ -258,10 +249,8 @@ class ESPHomeClient(BaseBleakClient):
|
||||
) -> None:
|
||||
"""Handle a connect or disconnect."""
|
||||
_LOGGER.debug(
|
||||
"%s: %s - %s: Connection state changed to connected=%s mtu=%s error=%s",
|
||||
self._source_name,
|
||||
self._ble_device.name,
|
||||
self._ble_device.address,
|
||||
"%s: Connection state changed to connected=%s mtu=%s error=%s",
|
||||
self._description,
|
||||
connected,
|
||||
mtu,
|
||||
error,
|
||||
@ -300,12 +289,10 @@ class ESPHomeClient(BaseBleakClient):
|
||||
return
|
||||
|
||||
_LOGGER.debug(
|
||||
"%s: %s - %s: connected, registering for disconnected callbacks",
|
||||
self._source_name,
|
||||
self._ble_device.name,
|
||||
self._ble_device.address,
|
||||
"%s: connected, registering for disconnected callbacks",
|
||||
self._description,
|
||||
)
|
||||
self._disconnect_callbacks.append(self._async_esp_disconnected)
|
||||
self._disconnect_callbacks.add(self._async_esp_disconnected)
|
||||
connected_future.set_result(connected)
|
||||
|
||||
@api_error_as_bleak_error
|
||||
@ -403,10 +390,8 @@ class ESPHomeClient(BaseBleakClient):
|
||||
if bluetooth_device.ble_connections_free:
|
||||
return
|
||||
_LOGGER.debug(
|
||||
"%s: %s - %s: Out of connection slots, waiting for a free one",
|
||||
self._source_name,
|
||||
self._ble_device.name,
|
||||
self._ble_device.address,
|
||||
"%s: Out of connection slots, waiting for a free one",
|
||||
self._description,
|
||||
)
|
||||
async with asyncio.timeout(timeout):
|
||||
await bluetooth_device.wait_for_ble_connections_free()
|
||||
@ -434,7 +419,7 @@ class ESPHomeClient(BaseBleakClient):
|
||||
if response.paired:
|
||||
return True
|
||||
_LOGGER.error(
|
||||
"Pairing with %s failed due to error: %s", self.address, response.error
|
||||
"%s: Pairing failed due to error: %s", self._description, response.error
|
||||
)
|
||||
return False
|
||||
|
||||
@ -451,7 +436,7 @@ class ESPHomeClient(BaseBleakClient):
|
||||
if response.success:
|
||||
return True
|
||||
_LOGGER.error(
|
||||
"Unpairing with %s failed due to error: %s", self.address, response.error
|
||||
"%s: Unpairing failed due to error: %s", self._description, response.error
|
||||
)
|
||||
return False
|
||||
|
||||
@ -486,30 +471,14 @@ class ESPHomeClient(BaseBleakClient):
|
||||
self._feature_flags & BluetoothProxyFeature.REMOTE_CACHING
|
||||
or dangerous_use_bleak_cache
|
||||
) and (cached_services := cache.get_gatt_services_cache(address_as_int)):
|
||||
_LOGGER.debug(
|
||||
"%s: %s - %s: Cached services hit",
|
||||
self._source_name,
|
||||
self._ble_device.name,
|
||||
self._ble_device.address,
|
||||
)
|
||||
_LOGGER.debug("%s: Cached services hit", self._description)
|
||||
self.services = cached_services
|
||||
return self.services
|
||||
_LOGGER.debug(
|
||||
"%s: %s - %s: Cached services miss",
|
||||
self._source_name,
|
||||
self._ble_device.name,
|
||||
self._ble_device.address,
|
||||
)
|
||||
_LOGGER.debug("%s: Cached services miss", self._description)
|
||||
esphome_services = await self._client.bluetooth_gatt_get_services(
|
||||
address_as_int
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"%s: %s - %s: Got services: %s",
|
||||
self._source_name,
|
||||
self._ble_device.name,
|
||||
self._ble_device.address,
|
||||
esphome_services,
|
||||
)
|
||||
_LOGGER.debug("%s: Got services: %s", self._description, esphome_services)
|
||||
max_write_without_response = self.mtu_size - GATT_HEADER_SIZE
|
||||
services = BleakGATTServiceCollection() # type: ignore[no-untyped-call]
|
||||
for service in esphome_services.services:
|
||||
@ -538,12 +507,7 @@ class ESPHomeClient(BaseBleakClient):
|
||||
raise BleakError("Failed to get services from remote esp")
|
||||
|
||||
self.services = services
|
||||
_LOGGER.debug(
|
||||
"%s: %s - %s: Cached services saved",
|
||||
self._source_name,
|
||||
self._ble_device.name,
|
||||
self._ble_device.address,
|
||||
)
|
||||
_LOGGER.debug("%s: Cached services saved", self._description)
|
||||
cache.set_gatt_services_cache(address_as_int, services)
|
||||
return services
|
||||
|
||||
@ -552,13 +516,15 @@ class ESPHomeClient(BaseBleakClient):
|
||||
) -> BleakGATTCharacteristic:
|
||||
"""Resolve a characteristic specifier to a BleakGATTCharacteristic object."""
|
||||
if (services := self.services) is None:
|
||||
raise BleakError("Services have not been resolved")
|
||||
raise BleakError(f"{self._description}: Services have not been resolved")
|
||||
if not isinstance(char_specifier, BleakGATTCharacteristic):
|
||||
characteristic = services.get_characteristic(char_specifier)
|
||||
else:
|
||||
characteristic = char_specifier
|
||||
if not characteristic:
|
||||
raise BleakError(f"Characteristic {char_specifier} was not found!")
|
||||
raise BleakError(
|
||||
f"{self._description}: Characteristic {char_specifier} was not found!"
|
||||
)
|
||||
return characteristic
|
||||
|
||||
@verify_connected
|
||||
@ -579,8 +545,8 @@ class ESPHomeClient(BaseBleakClient):
|
||||
if response.success:
|
||||
return True
|
||||
_LOGGER.error(
|
||||
"Clear cache failed with %s failed due to error: %s",
|
||||
self.address,
|
||||
"%s: Clear cache failed due to error: %s",
|
||||
self._description,
|
||||
response.error,
|
||||
)
|
||||
return False
|
||||
@ -692,7 +658,7 @@ class ESPHomeClient(BaseBleakClient):
|
||||
ble_handle = characteristic.handle
|
||||
if ble_handle in self._notify_cancels:
|
||||
raise BleakError(
|
||||
"Notifications are already enabled on "
|
||||
f"{self._description}: Notifications are already enabled on "
|
||||
f"service:{characteristic.service_uuid} "
|
||||
f"characteristic:{characteristic.uuid} "
|
||||
f"handle:{ble_handle}"
|
||||
@ -702,8 +668,8 @@ class ESPHomeClient(BaseBleakClient):
|
||||
and "indicate" not in characteristic.properties
|
||||
):
|
||||
raise BleakError(
|
||||
f"Characteristic {characteristic.uuid} does not have notify or indicate"
|
||||
" property set."
|
||||
f"{self._description}: Characteristic {characteristic.uuid} "
|
||||
"does not have notify or indicate property set."
|
||||
)
|
||||
|
||||
self._notify_cancels[
|
||||
@ -725,18 +691,13 @@ class ESPHomeClient(BaseBleakClient):
|
||||
cccd_descriptor = characteristic.get_descriptor(CCCD_UUID)
|
||||
if not cccd_descriptor:
|
||||
raise BleakError(
|
||||
f"Characteristic {characteristic.uuid} does not have a "
|
||||
"characteristic client config descriptor."
|
||||
f"{self._description}: Characteristic {characteristic.uuid} "
|
||||
"does not have a characteristic client config descriptor."
|
||||
)
|
||||
|
||||
_LOGGER.debug(
|
||||
(
|
||||
"%s: %s - %s: Writing to CCD descriptor %s for notifications with"
|
||||
" properties=%s"
|
||||
),
|
||||
self._source_name,
|
||||
self._ble_device.name,
|
||||
self._ble_device.address,
|
||||
"%s: Writing to CCD descriptor %s for notifications with properties=%s",
|
||||
self._description,
|
||||
cccd_descriptor.handle,
|
||||
characteristic.properties,
|
||||
)
|
||||
@ -774,12 +735,10 @@ class ESPHomeClient(BaseBleakClient):
|
||||
if self._cancel_connection_state:
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"%s: %s - %s: ESPHomeClient bleak client was not properly"
|
||||
"%s: ESPHomeClient bleak client was not properly"
|
||||
" disconnected before destruction"
|
||||
),
|
||||
self._source_name,
|
||||
self._ble_device.name,
|
||||
self._ble_device.address,
|
||||
self._description,
|
||||
)
|
||||
if not self._loop.is_closed():
|
||||
self._loop.call_soon_threadsafe(self._async_disconnected_cleanup)
|
||||
|
@ -107,7 +107,7 @@ class RuntimeEntryData:
|
||||
bluetooth_device: ESPHomeBluetoothDevice | None = None
|
||||
api_version: APIVersion = field(default_factory=APIVersion)
|
||||
cleanup_callbacks: list[Callable[[], None]] = field(default_factory=list)
|
||||
disconnect_callbacks: list[Callable[[], None]] = field(default_factory=list)
|
||||
disconnect_callbacks: set[Callable[[], None]] = field(default_factory=set)
|
||||
state_subscriptions: dict[
|
||||
tuple[type[EntityState], int], Callable[[], None]
|
||||
] = field(default_factory=dict)
|
||||
@ -427,3 +427,19 @@ class RuntimeEntryData:
|
||||
if self.original_options == entry.options:
|
||||
return
|
||||
hass.async_create_task(hass.config_entries.async_reload(entry.entry_id))
|
||||
|
||||
@callback
|
||||
def async_on_disconnect(self) -> None:
|
||||
"""Call when the entry has been disconnected.
|
||||
|
||||
Safe to call multiple times.
|
||||
"""
|
||||
self.available = False
|
||||
# Make a copy since calling the disconnect callbacks
|
||||
# may also try to discard/remove themselves.
|
||||
for disconnect_cb in self.disconnect_callbacks.copy():
|
||||
disconnect_cb()
|
||||
# Make sure to clear the set to give up the reference
|
||||
# to it and make sure all the callbacks can be GC'd.
|
||||
self.disconnect_callbacks.clear()
|
||||
self.disconnect_callbacks = set()
|
||||
|
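disconnect_callbacks becomes a set, and the teardown logic moves into async_on_disconnect, which iterates over a copy so a callback may discard itself while being called, then clears and replaces the set so stale references can be garbage collected. A minimal standalone sketch of the pattern (not the real RuntimeEntryData):

from collections.abc import Callable

class EntryData:
    """Simplified stand-in for the ESPHome runtime entry data."""

    def __init__(self) -> None:
        self.available = True
        self.disconnect_callbacks: set[Callable[[], None]] = set()

    def async_on_disconnect(self) -> None:
        """Safe to call multiple times."""
        self.available = False
        # Copy first: a callback may call disconnect_callbacks.discard(itself).
        for disconnect_cb in self.disconnect_callbacks.copy():
            disconnect_cb()
        # Clear and replace the set so all callback references can be GC'd.
        self.disconnect_callbacks.clear()
        self.disconnect_callbacks = set()

entry = EntryData()
entry.disconnect_callbacks.add(lambda: print("scanner disconnected"))
entry.async_on_disconnect()
entry.async_on_disconnect()  # second call is a harmless no-op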
@ -294,7 +294,7 @@ class ESPHomeManager:
|
||||
event.data["entity_id"], attribute, new_state
|
||||
)
|
||||
|
||||
self.entry_data.disconnect_callbacks.append(
|
||||
self.entry_data.disconnect_callbacks.add(
|
||||
async_track_state_change_event(
|
||||
hass, [entity_id], send_home_assistant_state_event
|
||||
)
|
||||
@ -439,7 +439,7 @@ class ESPHomeManager:
|
||||
reconnect_logic.name = device_info.name
|
||||
|
||||
if device_info.bluetooth_proxy_feature_flags_compat(cli.api_version):
|
||||
entry_data.disconnect_callbacks.append(
|
||||
entry_data.disconnect_callbacks.add(
|
||||
await async_connect_scanner(
|
||||
hass, entry, cli, entry_data, self.domain_data.bluetooth_cache
|
||||
)
|
||||
@ -459,7 +459,7 @@ class ESPHomeManager:
|
||||
await cli.subscribe_home_assistant_states(self.async_on_state_subscription)
|
||||
|
||||
if device_info.voice_assistant_version:
|
||||
entry_data.disconnect_callbacks.append(
|
||||
entry_data.disconnect_callbacks.add(
|
||||
await cli.subscribe_voice_assistant(
|
||||
self._handle_pipeline_start,
|
||||
self._handle_pipeline_stop,
|
||||
@ -487,10 +487,7 @@ class ESPHomeManager:
|
||||
host,
|
||||
expected_disconnect,
|
||||
)
|
||||
for disconnect_cb in entry_data.disconnect_callbacks:
|
||||
disconnect_cb()
|
||||
entry_data.disconnect_callbacks = []
|
||||
entry_data.available = False
|
||||
entry_data.async_on_disconnect()
|
||||
entry_data.expected_disconnect = expected_disconnect
|
||||
# Mark state as stale so that we will always dispatch
|
||||
# the next state update of that type when the device reconnects
|
||||
@ -755,10 +752,7 @@ async def cleanup_instance(hass: HomeAssistant, entry: ConfigEntry) -> RuntimeEn
|
||||
"""Cleanup the esphome client if it exists."""
|
||||
domain_data = DomainData.get(hass)
|
||||
data = domain_data.pop_entry_data(entry)
|
||||
data.available = False
|
||||
for disconnect_cb in data.disconnect_callbacks:
|
||||
disconnect_cb()
|
||||
data.disconnect_callbacks = []
|
||||
data.async_on_disconnect()
|
||||
for cleanup_callback in data.cleanup_callbacks:
|
||||
cleanup_callback()
|
||||
await data.async_cleanup()
|
||||
|
@ -69,7 +69,7 @@ class FitbitApi(ABC):
|
||||
profile = response["user"]
|
||||
self._profile = FitbitProfile(
|
||||
encoded_id=profile["encodedId"],
|
||||
full_name=profile["fullName"],
|
||||
display_name=profile["displayName"],
|
||||
locale=profile.get("locale"),
|
||||
)
|
||||
return self._profile
|
||||
|
@ -90,7 +90,7 @@ class OAuth2FlowHandler(
|
||||
|
||||
await self.async_set_unique_id(profile.encoded_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(title=profile.full_name, data=data)
|
||||
return self.async_create_entry(title=profile.display_name, data=data)
|
||||
|
||||
async def async_step_import(self, data: dict[str, Any]) -> FlowResult:
|
||||
"""Handle import from YAML."""
|
||||
|
@ -14,8 +14,8 @@ class FitbitProfile:
|
||||
encoded_id: str
|
||||
"""The ID representing the Fitbit user."""
|
||||
|
||||
full_name: str
|
||||
"""The first name value specified in the user's account settings."""
|
||||
display_name: str
|
||||
"""The name shown when the user's friends look at their Fitbit profile."""
|
||||
|
||||
locale: str | None
|
||||
"""The locale defined in the user's Fitbit account settings."""
|
||||
|
@ -134,6 +134,17 @@ def _water_unit(unit_system: FitbitUnitSystem) -> UnitOfVolume:
|
||||
return UnitOfVolume.MILLILITERS
|
||||
|
||||
|
||||
def _int_value_or_none(field: str) -> Callable[[dict[str, Any]], int | None]:
|
||||
"""Value function that will parse the specified field if present."""
|
||||
|
||||
def convert(result: dict[str, Any]) -> int | None:
|
||||
if (value := result["value"].get(field)) is not None:
|
||||
return int(value)
|
||||
return None
|
||||
|
||||
return convert
|
||||
|
||||
|
||||
@dataclass
|
||||
class FitbitSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes Fitbit sensor entity."""
|
||||
@@ -206,7 +217,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = (
         name="Resting Heart Rate",
         native_unit_of_measurement="bpm",
         icon="mdi:heart-pulse",
-        value_fn=lambda result: int(result["value"]["restingHeartRate"]),
+        value_fn=_int_value_or_none("restingHeartRate"),
         scope=FitbitScope.HEART_RATE,
         state_class=SensorStateClass.MEASUREMENT,
     ),
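Not every Fitbit heart-rate result carries restingHeartRate, so the hard-coded int(result["value"]["restingHeartRate"]) is replaced by a small factory that returns None when the field is absent. The factory from the diff plus a usage sketch with made-up payloads:

from collections.abc import Callable
from typing import Any

def _int_value_or_none(field: str) -> Callable[[dict[str, Any]], int | None]:
    """Value function that will parse the specified field if present."""

    def convert(result: dict[str, Any]) -> int | None:
        if (value := result["value"].get(field)) is not None:
            return int(value)
        return None

    return convert

value_fn = _int_value_or_none("restingHeartRate")
print(value_fn({"value": {"restingHeartRate": 62}}))  # 62
print(value_fn({"value": {}}))                        # None instead of a KeyError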
@@ -521,8 +521,13 @@ class GoogleCalendarEntity(
 def _get_calendar_event(event: Event) -> CalendarEvent:
     """Return a CalendarEvent from an API event."""
     rrule: str | None = None
-    if len(event.recurrence) == 1:
-        rrule = event.recurrence[0].lstrip(RRULE_PREFIX)
+    # Home Assistant expects a single RRULE: and all other rule types are unsupported or ignored
+    if (
+        len(event.recurrence) == 1
+        and (raw_rule := event.recurrence[0])
+        and raw_rule.startswith(RRULE_PREFIX)
+    ):
+        rrule = raw_rule.removeprefix(RRULE_PREFIX)
     return CalendarEvent(
         uid=event.ical_uuid,
         recurrence_id=event.id if event.recurring_event_id else None,
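str.lstrip treats its argument as a set of characters to strip, not as a prefix, so event.recurrence[0].lstrip(RRULE_PREFIX) could also eat the first characters of the rule body; the new code checks startswith and uses removeprefix instead. A quick illustration, assuming RRULE_PREFIX is the literal "RRULE:":

RRULE_PREFIX = "RRULE:"  # assumed value of the constant

rule = "RRULE:UNTIL=20240101"

print(rule.lstrip(RRULE_PREFIX))        # 'NTIL=20240101' -- the leading U is in the strip set
print(rule.removeprefix(RRULE_PREFIX))  # 'UNTIL=20240101' -- only the exact prefix is removed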
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/calendar.google",
   "iot_class": "cloud_polling",
   "loggers": ["googleapiclient"],
-  "requirements": ["gcal-sync==5.0.0", "oauth2client==4.1.3"]
+  "requirements": ["gcal-sync==6.0.1", "oauth2client==4.1.3"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/honeywell",
   "iot_class": "cloud_polling",
   "loggers": ["somecomfort"],
-  "requirements": ["AIOSomecomfort==0.0.17"]
+  "requirements": ["AIOSomecomfort==0.0.22"]
 }
@@ -242,5 +242,6 @@ class IpBanManager:

     async def async_add_ban(self, remote_addr: IPv4Address | IPv6Address) -> None:
         """Add a new IP address to the banned list."""
-        new_ban = self.ip_bans_lookup[remote_addr] = IpBan(remote_addr)
-        await self.hass.async_add_executor_job(self._add_ban, new_ban)
+        if remote_addr not in self.ip_bans_lookup:
+            new_ban = self.ip_bans_lookup[remote_addr] = IpBan(remote_addr)
+            await self.hass.async_add_executor_job(self._add_ban, new_ban)
@ -6,6 +6,7 @@ from collections.abc import Mapping
|
||||
from datetime import datetime, timedelta
|
||||
import email
|
||||
from email.header import decode_header, make_header
|
||||
from email.message import Message
|
||||
from email.utils import parseaddr, parsedate_to_datetime
|
||||
import logging
|
||||
from typing import Any
|
||||
@ -96,8 +97,9 @@ async def connect_to_server(data: Mapping[str, Any]) -> IMAP4_SSL:
|
||||
class ImapMessage:
|
||||
"""Class to parse an RFC822 email message."""
|
||||
|
||||
def __init__(self, raw_message: bytes) -> None:
|
||||
def __init__(self, raw_message: bytes, charset: str = "utf-8") -> None:
|
||||
"""Initialize IMAP message."""
|
||||
self._charset = charset
|
||||
self.email_message = email.message_from_bytes(raw_message)
|
||||
|
||||
@property
|
||||
@ -157,18 +159,30 @@ class ImapMessage:
|
||||
message_html: str | None = None
|
||||
message_untyped_text: str | None = None
|
||||
|
||||
def _decode_payload(part: Message) -> str:
|
||||
"""Try to decode text payloads.
|
||||
|
||||
Common text encodings are quoted-printable or base64.
|
||||
Falls back to the raw content part if decoding fails.
|
||||
"""
|
||||
try:
|
||||
return str(part.get_payload(decode=True).decode(self._charset))
|
||||
except Exception: # pylint: disable=broad-except
|
||||
return str(part.get_payload())
|
||||
|
||||
part: Message
|
||||
for part in self.email_message.walk():
|
||||
if part.get_content_type() == CONTENT_TYPE_TEXT_PLAIN:
|
||||
if message_text is None:
|
||||
message_text = part.get_payload()
|
||||
message_text = _decode_payload(part)
|
||||
elif part.get_content_type() == "text/html":
|
||||
if message_html is None:
|
||||
message_html = part.get_payload()
|
||||
message_html = _decode_payload(part)
|
||||
elif (
|
||||
part.get_content_type().startswith("text")
|
||||
and message_untyped_text is None
|
||||
):
|
||||
message_untyped_text = part.get_payload()
|
||||
message_untyped_text = str(part.get_payload())
|
||||
|
||||
if message_text is not None:
|
||||
return message_text
|
||||
@ -223,7 +237,9 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
|
||||
"""Send a event for the last message if the last message was changed."""
|
||||
response = await self.imap_client.fetch(last_message_uid, "BODY.PEEK[]")
|
||||
if response.result == "OK":
|
||||
message = ImapMessage(response.lines[1])
|
||||
message = ImapMessage(
|
||||
response.lines[1], charset=self.config_entry.data[CONF_CHARSET]
|
||||
)
|
||||
# Set `initial` to `False` if the last message is triggered again
|
||||
initial: bool = True
|
||||
if (message_id := message.message_id) == self._last_message_id:
|
||||
|
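Text parts are now decoded with get_payload(decode=True) using the charset configured for the entry, which handles base64 and quoted-printable transfer encodings, with a fallback to the raw payload when decoding fails. A standalone sketch of the same idea on a synthetic message:

import email
from email.message import Message

def decode_payload(part: Message, charset: str = "utf-8") -> str:
    """Decode base64/quoted-printable text parts, falling back to the raw payload."""
    try:
        return str(part.get_payload(decode=True).decode(charset))
    except Exception:  # broad fallback, mirroring the diff
        return str(part.get_payload())

raw = (
    b"Content-Type: text/plain; charset=utf-8\r\n"
    b"Content-Transfer-Encoding: base64\r\n"
    b"\r\n"
    b"SGVsbG8gd29ybGQ=\r\n"
)
print(decode_payload(email.message_from_bytes(raw)))  # Hello world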
@ -9,9 +9,9 @@ from typing import Any
|
||||
from ical.calendar import Calendar
|
||||
from ical.calendar_stream import IcsCalendarStream
|
||||
from ical.event import Event
|
||||
from ical.exceptions import CalendarParseError
|
||||
from ical.store import EventStore, EventStoreError
|
||||
from ical.types import Range, Recur
|
||||
from pydantic import ValidationError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.calendar import (
|
||||
@ -178,8 +178,8 @@ def _parse_event(event: dict[str, Any]) -> Event:
|
||||
event[key] = dt_util.as_local(value).replace(tzinfo=None)
|
||||
|
||||
try:
|
||||
return Event.parse_obj(event)
|
||||
except ValidationError as err:
|
||||
return Event(**event)
|
||||
except CalendarParseError as err:
|
||||
_LOGGER.debug("Error parsing event input fields: %s (%s)", event, str(err))
|
||||
raise vol.Invalid("Error parsing event input fields") from err
|
||||
|
||||
|
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/local_calendar",
   "iot_class": "local_polling",
   "loggers": ["ical"],
-  "requirements": ["ical==5.1.0"]
+  "requirements": ["ical==6.1.0"]
 }
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/local_todo",
   "iot_class": "local_polling",
-  "requirements": ["ical==5.1.0"]
+  "requirements": ["ical==6.1.0"]
 }
@ -7,9 +7,9 @@ from typing import Any
|
||||
|
||||
from ical.calendar import Calendar
|
||||
from ical.calendar_stream import IcsCalendarStream
|
||||
from ical.exceptions import CalendarParseError
|
||||
from ical.store import TodoStore
|
||||
from ical.todo import Todo, TodoStatus
|
||||
from pydantic import ValidationError
|
||||
|
||||
from homeassistant.components.todo import (
|
||||
TodoItem,
|
||||
@ -63,9 +63,11 @@ def _todo_dict_factory(obj: Iterable[tuple[str, Any]]) -> dict[str, str]:
|
||||
"""Convert TodoItem dataclass items to dictionary of attributes for ical consumption."""
|
||||
result: dict[str, str] = {}
|
||||
for name, value in obj:
|
||||
if value is None:
|
||||
continue
|
||||
if name == "status":
|
||||
result[name] = ICS_TODO_STATUS_MAP_INV[value]
|
||||
elif value is not None:
|
||||
else:
|
||||
result[name] = value
|
||||
return result
|
||||
|
||||
@ -74,7 +76,7 @@ def _convert_item(item: TodoItem) -> Todo:
|
||||
"""Convert a HomeAssistant TodoItem to an ical Todo."""
|
||||
try:
|
||||
return Todo(**dataclasses.asdict(item, dict_factory=_todo_dict_factory))
|
||||
except ValidationError as err:
|
||||
except CalendarParseError as err:
|
||||
_LOGGER.debug("Error parsing todo input fields: %s (%s)", item, err)
|
||||
raise HomeAssistantError("Error parsing todo input fields") from err
|
||||
|
||||
|
@@ -27,7 +27,7 @@ def setup_platform(

     data = hass.data[LUPUSEC_DOMAIN]

-    device_types = [CONST.TYPE_OPENING]
+    device_types = CONST.TYPE_OPENING

     devices = []
     for device in data.lupusec.get_devices(generic_type=device_types):
@@ -5,5 +5,5 @@
   "documentation": "https://www.home-assistant.io/integrations/lupusec",
   "iot_class": "local_polling",
   "loggers": ["lupupy"],
-  "requirements": ["lupupy==0.3.0"]
+  "requirements": ["lupupy==0.3.1"]
 }
@@ -28,7 +28,7 @@ def setup_platform(

     data = hass.data[LUPUSEC_DOMAIN]

-    device_types = [CONST.TYPE_SWITCH]
+    device_types = CONST.TYPE_SWITCH

     devices = []
     for device in data.lupusec.get_devices(generic_type=device_types):
@ -145,9 +145,7 @@ class MatterAdapter:
|
||||
get_clean_name(basic_info.nodeLabel)
|
||||
or get_clean_name(basic_info.productLabel)
|
||||
or get_clean_name(basic_info.productName)
|
||||
or device_type.__name__
|
||||
if device_type
|
||||
else None
|
||||
or (device_type.__name__ if device_type else None)
|
||||
)
|
||||
|
||||
# handle bridged devices
|
||||
|
@ -3,6 +3,7 @@ from __future__ import annotations
|
||||
|
||||
from abc import abstractmethod
|
||||
from collections.abc import Callable
|
||||
from contextlib import suppress
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
@ -110,7 +111,9 @@ class MatterEntity(Entity):
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
for unsub in self._unsubscribes:
|
||||
unsub()
|
||||
with suppress(ValueError):
|
||||
# suppress ValueError to prevent race conditions
|
||||
unsub()
|
||||
|
||||
@callback
|
||||
def _on_matter_event(self, event: EventType, data: Any = None) -> None:
|
||||
|
@@ -6,5 +6,5 @@
   "dependencies": ["websocket_api"],
   "documentation": "https://www.home-assistant.io/integrations/matter",
   "iot_class": "local_push",
-  "requirements": ["python-matter-server==4.0.0"]
+  "requirements": ["python-matter-server==4.0.2"]
 }
@ -67,7 +67,15 @@ DISCOVERY_SCHEMAS = [
|
||||
),
|
||||
entity_class=MatterSwitch,
|
||||
required_attributes=(clusters.OnOff.Attributes.OnOff,),
|
||||
# restrict device type to prevent discovery by the wrong platform
|
||||
device_type=(device_types.OnOffPlugInUnit,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SWITCH,
|
||||
entity_description=SwitchEntityDescription(
|
||||
key="MatterSwitch", device_class=SwitchDeviceClass.SWITCH, name=None
|
||||
),
|
||||
entity_class=MatterSwitch,
|
||||
required_attributes=(clusters.OnOff.Attributes.OnOff,),
|
||||
not_device_type=(
|
||||
device_types.ColorTemperatureLight,
|
||||
device_types.DimmableLight,
|
||||
@ -76,7 +84,6 @@ DISCOVERY_SCHEMAS = [
|
||||
device_types.DoorLock,
|
||||
device_types.ColorDimmerSwitch,
|
||||
device_types.DimmerSwitch,
|
||||
device_types.OnOffLightSwitch,
|
||||
device_types.Thermostat,
|
||||
),
|
||||
),
|
||||
|
@@ -256,7 +256,7 @@ def valid_humidity_state_configuration(config: ConfigType) -> ConfigType:
         CONF_HUMIDITY_STATE_TOPIC in config
         and CONF_HUMIDITY_COMMAND_TOPIC not in config
     ):
-        raise ValueError(
+        raise vol.Invalid(
            f"{CONF_HUMIDITY_STATE_TOPIC} cannot be used without"
            f" {CONF_HUMIDITY_COMMAND_TOPIC}"
        )
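Raising a bare ValueError inside a voluptuous validator is reported as a generic "not a valid value", while vol.Invalid carries the explanatory message through to the user, which is why the humidity-topic check now raises vol.Invalid. A tiny illustration with simplified option names:

import voluptuous as vol

def valid_humidity_state_configuration(config: dict) -> dict:
    """Simplified version of the validator from the diff."""
    if "humidity_state_topic" in config and "humidity_command_topic" not in config:
        raise vol.Invalid(
            "humidity_state_topic cannot be used without humidity_command_topic"
        )
    return config

try:
    vol.Schema(valid_humidity_state_configuration)({"humidity_state_topic": "x"})
except vol.Invalid as err:
    print(err)  # the specific message survives; a ValueError would become "not a valid value"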
@@ -367,10 +367,13 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
         if brightness_supported(self.supported_color_modes):
             try:
                 if brightness := values["brightness"]:
-                    self._attr_brightness = int(
-                        brightness  # type: ignore[operator]
-                        / float(self._config[CONF_BRIGHTNESS_SCALE])
-                        * 255
+                    self._attr_brightness = min(
+                        int(
+                            brightness  # type: ignore[operator]
+                            / float(self._config[CONF_BRIGHTNESS_SCALE])
+                            * 255
+                        ),
+                        255,
                     )
                 else:
                     _LOGGER.debug(
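Scaling an incoming JSON brightness by 255 / brightness_scale can overshoot 255 when a device reports a value above its configured scale, and Home Assistant expects brightness in the 0-255 range, so the result is now clamped with min(..., 255). A small worked example (the scale value is made up):

BRIGHTNESS_SCALE = 100  # hypothetical configured brightness_scale

def scaled_brightness(raw: int, scale: int = BRIGHTNESS_SCALE) -> int:
    """Convert a device-scale brightness to the 0-255 range, clamped at 255."""
    return min(int(raw / float(scale) * 255), 255)

print(scaled_brightness(50))   # 127
print(scaled_brightness(100))  # 255
print(scaled_brightness(120))  # 255 instead of 306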
@ -8,7 +8,6 @@ from typing import Any
|
||||
|
||||
import aiohttp
|
||||
import pyatmo
|
||||
from pyatmo.const import ALL_SCOPES as NETATMO_SCOPES
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import cloud
|
||||
@ -143,7 +142,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
try:
|
||||
await session.async_ensure_token_valid()
|
||||
except aiohttp.ClientResponseError as ex:
|
||||
_LOGGER.debug("API error: %s (%s)", ex.status, ex.message)
|
||||
_LOGGER.warning("API error: %s (%s)", ex.status, ex.message)
|
||||
if ex.status in (
|
||||
HTTPStatus.BAD_REQUEST,
|
||||
HTTPStatus.UNAUTHORIZED,
|
||||
@ -152,19 +151,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
raise ConfigEntryAuthFailed("Token not valid, trigger renewal") from ex
|
||||
raise ConfigEntryNotReady from ex
|
||||
|
||||
if entry.data["auth_implementation"] == cloud.DOMAIN:
|
||||
required_scopes = {
|
||||
scope
|
||||
for scope in NETATMO_SCOPES
|
||||
if scope not in ("access_doorbell", "read_doorbell")
|
||||
}
|
||||
else:
|
||||
required_scopes = set(NETATMO_SCOPES)
|
||||
|
||||
if not (set(session.token["scope"]) & required_scopes):
|
||||
_LOGGER.debug(
|
||||
required_scopes = api.get_api_scopes(entry.data["auth_implementation"])
|
||||
if not (set(session.token["scope"]) & set(required_scopes)):
|
||||
_LOGGER.warning(
|
||||
"Session is missing scopes: %s",
|
||||
required_scopes - set(session.token["scope"]),
|
||||
set(required_scopes) - set(session.token["scope"]),
|
||||
)
|
||||
raise ConfigEntryAuthFailed("Token scope not valid, trigger renewal")
|
||||
|
||||
|
@ -1,11 +1,29 @@
|
||||
"""API for Netatmo bound to HASS OAuth."""
|
||||
from collections.abc import Iterable
|
||||
from typing import cast
|
||||
|
||||
from aiohttp import ClientSession
|
||||
import pyatmo
|
||||
|
||||
from homeassistant.components import cloud
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
from .const import API_SCOPES_EXCLUDED_FROM_CLOUD
|
||||
|
||||
|
||||
def get_api_scopes(auth_implementation: str) -> Iterable[str]:
|
||||
"""Return the Netatmo API scopes based on the auth implementation."""
|
||||
|
||||
if auth_implementation == cloud.DOMAIN:
|
||||
return set(
|
||||
{
|
||||
scope
|
||||
for scope in pyatmo.const.ALL_SCOPES
|
||||
if scope not in API_SCOPES_EXCLUDED_FROM_CLOUD
|
||||
}
|
||||
)
|
||||
return sorted(pyatmo.const.ALL_SCOPES)
|
||||
|
||||
|
||||
class AsyncConfigEntryNetatmoAuth(pyatmo.AbstractAsyncAuth):
|
||||
"""Provide Netatmo authentication tied to an OAuth2 based config entry."""
|
||||
|
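The scope filtering that used to live inline in the setup and config-flow code is centralised in get_api_scopes, which drops the doorbell and mhs1 scopes when the Home Assistant Cloud auth implementation is used and returns the full sorted list otherwise. A rough sketch of how callers consume it (constant values and the sample auth implementation names are assumptions based on the diff):

ALL_SCOPES = ["access_doorbell", "read_doorbell", "read_station", "read_thermostat"]  # abridged
API_SCOPES_EXCLUDED_FROM_CLOUD = ["access_doorbell", "read_doorbell", "read_mhs1", "write_mhs1"]
CLOUD = "cloud"

def get_api_scopes(auth_implementation: str):
    """Return the Netatmo API scopes based on the auth implementation."""
    if auth_implementation == CLOUD:
        return {s for s in ALL_SCOPES if s not in API_SCOPES_EXCLUDED_FROM_CLOUD}
    return sorted(ALL_SCOPES)

# Config flow: build the authorize URL "scope" parameter.
print(" ".join(sorted(get_api_scopes(CLOUD))))
# Setup: warn when the stored token is missing required scopes.
token_scopes = {"read_station"}
print(set(get_api_scopes("local_oauth")) - token_scopes)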
@ -6,7 +6,6 @@ import logging
|
||||
from typing import Any
|
||||
import uuid
|
||||
|
||||
from pyatmo.const import ALL_SCOPES
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
@ -15,6 +14,7 @@ from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv
|
||||
|
||||
from .api import get_api_scopes
|
||||
from .const import (
|
||||
CONF_AREA_NAME,
|
||||
CONF_LAT_NE,
|
||||
@ -53,13 +53,7 @@ class NetatmoFlowHandler(
|
||||
@property
|
||||
def extra_authorize_data(self) -> dict:
|
||||
"""Extra data that needs to be appended to the authorize url."""
|
||||
exclude = []
|
||||
if self.flow_impl.name == "Home Assistant Cloud":
|
||||
exclude = ["access_doorbell", "read_doorbell"]
|
||||
|
||||
scopes = [scope for scope in ALL_SCOPES if scope not in exclude]
|
||||
scopes.sort()
|
||||
|
||||
scopes = get_api_scopes(self.flow_impl.domain)
|
||||
return {"scope": " ".join(scopes)}
|
||||
|
||||
async def async_step_user(self, user_input: dict | None = None) -> FlowResult:
|
||||
|
@ -30,6 +30,13 @@ HOME_DATA = "netatmo_home_data"
|
||||
DATA_HANDLER = "netatmo_data_handler"
|
||||
SIGNAL_NAME = "signal_name"
|
||||
|
||||
API_SCOPES_EXCLUDED_FROM_CLOUD = [
|
||||
"access_doorbell",
|
||||
"read_doorbell",
|
||||
"read_mhs1",
|
||||
"write_mhs1",
|
||||
]
|
||||
|
||||
NETATMO_CREATE_BATTERY = "netatmo_create_battery"
|
||||
NETATMO_CREATE_CAMERA = "netatmo_create_camera"
|
||||
NETATMO_CREATE_CAMERA_LIGHT = "netatmo_create_camera_light"
|
||||
|
@ -66,7 +66,11 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
self._reauth_entry.data if self._reauth_entry else {}
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=get_data_schema(currencies, existing_data)
|
||||
step_id="user",
|
||||
data_schema=get_data_schema(currencies, existing_data),
|
||||
description_placeholders={
|
||||
"signup": "https://openexchangerates.org/signup"
|
||||
},
|
||||
)
|
||||
|
||||
errors = {}
|
||||
|
@@ -13,7 +13,7 @@
   "integration_type": "hub",
   "iot_class": "cloud_polling",
   "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"],
-  "requirements": ["pyoverkiz==1.12.1"],
+  "requirements": ["pyoverkiz==1.13.2"],
   "zeroconf": [
     {
       "type": "_kizbox._tcp.local.",
@ -19,6 +19,7 @@ from homeassistant.components.climate import (
|
||||
from homeassistant.components.cover import ATTR_POSITION, ATTR_TILT_POSITION
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
from homeassistant.components.humidifier import ATTR_AVAILABLE_MODES, ATTR_HUMIDITY
|
||||
from homeassistant.components.light import ATTR_BRIGHTNESS
|
||||
from homeassistant.components.sensor import SensorDeviceClass
|
||||
from homeassistant.const import (
|
||||
ATTR_BATTERY_LEVEL,
|
||||
@ -323,14 +324,14 @@ class PrometheusMetrics:
|
||||
}
|
||||
|
||||
def _battery(self, state):
|
||||
if "battery_level" in state.attributes:
|
||||
if (battery_level := state.attributes.get(ATTR_BATTERY_LEVEL)) is not None:
|
||||
metric = self._metric(
|
||||
"battery_level_percent",
|
||||
self.prometheus_cli.Gauge,
|
||||
"Battery level as a percentage of its capacity",
|
||||
)
|
||||
try:
|
||||
value = float(state.attributes[ATTR_BATTERY_LEVEL])
|
||||
value = float(battery_level)
|
||||
metric.labels(**self._labels(state)).set(value)
|
||||
except ValueError:
|
||||
pass
|
||||
@ -434,8 +435,9 @@ class PrometheusMetrics:
|
||||
)
|
||||
|
||||
try:
|
||||
if "brightness" in state.attributes and state.state == STATE_ON:
|
||||
value = state.attributes["brightness"] / 255.0
|
||||
brightness = state.attributes.get(ATTR_BRIGHTNESS)
|
||||
if state.state == STATE_ON and brightness is not None:
|
||||
value = brightness / 255.0
|
||||
else:
|
||||
value = self.state_as_number(state)
|
||||
value = value * 100
|
||||
|
@ -1,17 +1,25 @@
|
||||
"""Support for Rain Bird Irrigation system LNK WiFi Module."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from pyrainbird.async_client import AsyncRainbirdClient, AsyncRainbirdController
|
||||
from pyrainbird.exceptions import RainbirdApiException
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, Platform
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.entity_registry import async_entries_for_config_entry
|
||||
|
||||
from .const import CONF_SERIAL_NUMBER
|
||||
from .coordinator import RainbirdData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.SWITCH,
|
||||
Platform.SENSOR,
|
||||
@ -36,6 +44,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
entry.data[CONF_PASSWORD],
|
||||
)
|
||||
)
|
||||
|
||||
if not (await _async_fix_unique_id(hass, controller, entry)):
|
||||
return False
|
||||
if mac_address := entry.data.get(CONF_MAC):
|
||||
_async_fix_entity_unique_id(
|
||||
hass,
|
||||
er.async_get(hass),
|
||||
entry.entry_id,
|
||||
format_mac(mac_address),
|
||||
str(entry.data[CONF_SERIAL_NUMBER]),
|
||||
)
|
||||
|
||||
try:
|
||||
model_info = await controller.get_model_and_version()
|
||||
except RainbirdApiException as err:
|
||||
@ -51,6 +71,72 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
async def _async_fix_unique_id(
|
||||
hass: HomeAssistant, controller: AsyncRainbirdController, entry: ConfigEntry
|
||||
) -> bool:
|
||||
"""Update the config entry with a unique id based on the mac address."""
|
||||
_LOGGER.debug("Checking for migration of config entry (%s)", entry.unique_id)
|
||||
if not (mac_address := entry.data.get(CONF_MAC)):
|
||||
try:
|
||||
wifi_params = await controller.get_wifi_params()
|
||||
except RainbirdApiException as err:
|
||||
_LOGGER.warning("Unable to fix missing unique id: %s", err)
|
||||
return True
|
||||
|
||||
if (mac_address := wifi_params.mac_address) is None:
|
||||
_LOGGER.warning("Unable to fix missing unique id (mac address was None)")
|
||||
return True
|
||||
|
||||
new_unique_id = format_mac(mac_address)
|
||||
if entry.unique_id == new_unique_id and CONF_MAC in entry.data:
|
||||
_LOGGER.debug("Config entry already in correct state")
|
||||
return True
|
||||
|
||||
entries = hass.config_entries.async_entries(DOMAIN)
|
||||
for existing_entry in entries:
|
||||
if existing_entry.unique_id == new_unique_id:
|
||||
_LOGGER.warning(
|
||||
"Unable to fix missing unique id (already exists); Removing duplicate entry"
|
||||
)
|
||||
hass.async_create_background_task(
|
||||
hass.config_entries.async_remove(entry.entry_id),
|
||||
"Remove rainbird config entry",
|
||||
)
|
||||
return False
|
||||
|
||||
_LOGGER.debug("Updating unique id to %s", new_unique_id)
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
unique_id=new_unique_id,
|
||||
data={
|
||||
**entry.data,
|
||||
CONF_MAC: mac_address,
|
||||
},
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
def _async_fix_entity_unique_id(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
config_entry_id: str,
|
||||
mac_address: str,
|
||||
serial_number: str,
|
||||
) -> None:
|
||||
"""Migrate existing entity if current one can't be found and an old one exists."""
|
||||
entity_entries = async_entries_for_config_entry(entity_registry, config_entry_id)
|
||||
for entity_entry in entity_entries:
|
||||
unique_id = str(entity_entry.unique_id)
|
||||
if unique_id.startswith(mac_address):
|
||||
continue
|
||||
if (suffix := unique_id.removeprefix(str(serial_number))) != unique_id:
|
||||
new_unique_id = f"{mac_address}{suffix}"
|
||||
_LOGGER.debug("Updating unique id from %s to %s", unique_id, new_unique_id)
|
||||
entity_registry.async_update_entity(
|
||||
entity_entry.entity_id, new_unique_id=new_unique_id
|
||||
)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
|
||||
|
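Entities were historically keyed by the controller serial number; the config entry now uses the MAC address, so _async_fix_entity_unique_id rewrites any remaining serial-prefixed unique IDs by swapping the prefix. A worked example of just the string manipulation involved (values are made up):

serial_number = "12345"
mac_address = "aa:bb:cc:dd:ee:ff"

def migrate_unique_id(unique_id: str) -> str | None:
    """Return the new unique id, or None when no migration is needed."""
    if unique_id.startswith(mac_address):
        return None  # already migrated
    if (suffix := unique_id.removeprefix(serial_number)) != unique_id:
        return f"{mac_address}{suffix}"
    return None  # unrelated unique id, leave untouched

print(migrate_unique_id("12345-rain-delay"))              # aa:bb:cc:dd:ee:ff-rain-delay
print(migrate_unique_id("aa:bb:cc:dd:ee:ff-rain-delay"))  # None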
@ -11,15 +11,17 @@ from pyrainbird.async_client import (
|
||||
AsyncRainbirdController,
|
||||
RainbirdApiException,
|
||||
)
|
||||
from pyrainbird.data import WifiParams
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
from homeassistant.helpers import config_validation as cv, selector
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
|
||||
from .const import (
|
||||
ATTR_DURATION,
|
||||
@ -69,7 +71,7 @@ class RainbirdConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
error_code: str | None = None
|
||||
if user_input:
|
||||
try:
|
||||
serial_number = await self._test_connection(
|
||||
serial_number, wifi_params = await self._test_connection(
|
||||
user_input[CONF_HOST], user_input[CONF_PASSWORD]
|
||||
)
|
||||
except ConfigFlowError as err:
|
||||
@ -77,11 +79,11 @@ class RainbirdConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
error_code = err.error_code
|
||||
else:
|
||||
return await self.async_finish(
|
||||
serial_number,
|
||||
data={
|
||||
CONF_HOST: user_input[CONF_HOST],
|
||||
CONF_PASSWORD: user_input[CONF_PASSWORD],
|
||||
CONF_SERIAL_NUMBER: serial_number,
|
||||
CONF_MAC: wifi_params.mac_address,
|
||||
},
|
||||
options={ATTR_DURATION: DEFAULT_TRIGGER_TIME_MINUTES},
|
||||
)
|
||||
@ -92,8 +94,10 @@ class RainbirdConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
errors={"base": error_code} if error_code else None,
|
||||
)
|
||||
|
||||
async def _test_connection(self, host: str, password: str) -> str:
|
||||
"""Test the connection and return the device serial number.
|
||||
async def _test_connection(
|
||||
self, host: str, password: str
|
||||
) -> tuple[str, WifiParams]:
|
||||
"""Test the connection and return the device identifiers.
|
||||
|
||||
Raises a ConfigFlowError on failure.
|
||||
"""
|
||||
@ -106,7 +110,10 @@ class RainbirdConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
try:
|
||||
async with asyncio.timeout(TIMEOUT_SECONDS):
|
||||
return await controller.get_serial_number()
|
||||
return await asyncio.gather(
|
||||
controller.get_serial_number(),
|
||||
controller.get_wifi_params(),
|
||||
)
|
||||
except asyncio.TimeoutError as err:
|
||||
raise ConfigFlowError(
|
||||
f"Timeout connecting to Rain Bird controller: {str(err)}",
|
||||
@ -120,18 +127,28 @@ class RainbirdConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
|
||||
async def async_finish(
|
||||
self,
|
||||
serial_number: str,
|
||||
data: dict[str, Any],
|
||||
options: dict[str, Any],
|
||||
) -> FlowResult:
|
||||
"""Create the config entry."""
|
||||
# Prevent devices with the same serial number. If the device does not have a serial number
|
||||
# then we can at least prevent configuring the same host twice.
|
||||
if serial_number:
|
||||
await self.async_set_unique_id(serial_number)
|
||||
self._abort_if_unique_id_configured()
|
||||
else:
|
||||
self._async_abort_entries_match(data)
|
||||
# The integration has historically used a serial number, but not all devices
|
||||
# historically had a valid one. Now the mac address is used as a unique id
|
||||
# and serial is still persisted in config entry data in case it is needed
|
||||
# in the future.
|
||||
# Either way, also prevent configuring the same host twice.
|
||||
await self.async_set_unique_id(format_mac(data[CONF_MAC]))
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={
|
||||
CONF_HOST: data[CONF_HOST],
|
||||
CONF_PASSWORD: data[CONF_PASSWORD],
|
||||
}
|
||||
)
|
||||
self._async_abort_entries_match(
|
||||
{
|
||||
CONF_HOST: data[CONF_HOST],
|
||||
CONF_PASSWORD: data[CONF_PASSWORD],
|
||||
}
|
||||
)
|
||||
return self.async_create_entry(
|
||||
title=data[CONF_HOST],
|
||||
data=data,
|
||||
|
@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/rainbird",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyrainbird"],
|
||||
"requirements": ["pyrainbird==4.0.0"]
|
||||
"requirements": ["pyrainbird==4.0.1"]
|
||||
}
|
||||
|
@ -28,7 +28,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
try:
|
||||
await renault_hub.async_initialise(config_entry)
|
||||
except aiohttp.ClientResponseError as exc:
|
||||
except aiohttp.ClientError as exc:
|
||||
raise ConfigEntryNotReady() from exc
|
||||
|
||||
hass.data[DOMAIN][config_entry.entry_id] = renault_hub
|
||||
|
@ -7,5 +7,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["smarttub"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["python-smarttub==0.0.35"]
|
||||
"requirements": ["python-smarttub==0.0.36"]
|
||||
}
|
||||
|
@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/subaru",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["stdiomask", "subarulink"],
|
||||
"requirements": ["subarulink==0.7.8"]
|
||||
"requirements": ["subarulink==0.7.9"]
|
||||
}
|
||||
|
@ -100,10 +100,10 @@ class TomatoDeviceScanner(DeviceScanner):
|
||||
try:
|
||||
if self.ssl:
|
||||
response = requests.Session().send(
|
||||
self.req, timeout=3, verify=self.verify_ssl
|
||||
self.req, timeout=60, verify=self.verify_ssl
|
||||
)
|
||||
else:
|
||||
response = requests.Session().send(self.req, timeout=3)
|
||||
response = requests.Session().send(self.req, timeout=60)
|
||||
|
||||
# Calling and parsing the Tomato api here. We only need the
|
||||
# wldev and dhcpd_lease values.
|
||||
|
@ -41,7 +41,7 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pyunifiprotect", "unifi_discovery"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pyunifiprotect==4.20.0", "unifi-discovery==1.1.7"],
|
||||
"requirements": ["pyunifiprotect==4.21.0", "unifi-discovery==1.1.7"],
|
||||
"ssdp": [
|
||||
{
|
||||
"manufacturer": "Ubiquiti Networks",
|
||||
|
@ -13,7 +13,7 @@
|
||||
"velbus-packet",
|
||||
"velbus-protocol"
|
||||
],
|
||||
"requirements": ["velbus-aio==2023.10.2"],
|
||||
"requirements": ["velbus-aio==2023.11.0"],
|
||||
"usb": [
|
||||
{
|
||||
"vid": "10CF",
|
||||
|
@ -166,12 +166,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
implementation = await async_get_config_entry_implementation(hass, entry)
|
||||
oauth_session = OAuth2Session(hass, entry, implementation)
|
||||
|
||||
refresh_lock = asyncio.Lock()
|
||||
|
||||
async def _refresh_token() -> str:
|
||||
await oauth_session.async_ensure_token_valid()
|
||||
token = oauth_session.token[CONF_ACCESS_TOKEN]
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(token, str)
|
||||
return token
|
||||
async with refresh_lock:
|
||||
await oauth_session.async_ensure_token_valid()
|
||||
token = oauth_session.token[CONF_ACCESS_TOKEN]
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(token, str)
|
||||
return token
|
||||
|
||||
client.refresh_token_function = _refresh_token
|
||||
withings_data = WithingsData(
|
||||
|
@ -5,5 +5,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/xmpp",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pyasn1", "slixmpp"],
|
||||
"requirements": ["slixmpp==1.8.4"]
|
||||
"requirements": ["slixmpp==1.8.4", "emoji==2.8.0"]
|
||||
}
|
||||
|
@ -18,6 +18,7 @@ from zwave_js_server.const.command_class.multilevel_switch import (
|
||||
from zwave_js_server.const.command_class.window_covering import (
|
||||
NO_POSITION_PROPERTY_KEYS,
|
||||
NO_POSITION_SUFFIX,
|
||||
WINDOW_COVERING_LEVEL_CHANGE_UP_PROPERTY,
|
||||
SlatStates,
|
||||
)
|
||||
from zwave_js_server.model.driver import Driver
|
||||
@ -369,7 +370,7 @@ class ZWaveWindowCovering(CoverPositionMixin, CoverTiltMixin):
|
||||
set_values_func(
|
||||
value,
|
||||
stop_value=self.get_zwave_value(
|
||||
"levelChangeUp",
|
||||
WINDOW_COVERING_LEVEL_CHANGE_UP_PROPERTY,
|
||||
value_property_key=value.property_key,
|
||||
),
|
||||
)
|
||||
|
@ -9,7 +9,7 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["zwave_js_server"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pyserial==3.5", "zwave-js-server-python==0.53.1"],
|
||||
"requirements": ["pyserial==3.5", "zwave-js-server-python==0.54.0"],
|
||||
"usb": [
|
||||
{
|
||||
"vid": "0658",
|
||||
|
@ -7,7 +7,7 @@ from typing import Final
|
||||
APPLICATION_NAME: Final = "HomeAssistant"
|
||||
MAJOR_VERSION: Final = 2023
|
||||
MINOR_VERSION: Final = 11
|
||||
PATCH_VERSION: Final = "2"
|
||||
PATCH_VERSION: Final = "3"
|
||||
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
|
||||
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
|
||||
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 11, 0)
|
||||
|
@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "homeassistant"
|
||||
version = "2023.11.2"
|
||||
version = "2023.11.3"
|
||||
license = {text = "Apache-2.0"}
|
||||
description = "Open-source home automation platform running on Python 3."
|
||||
readme = "README.rst"
|
||||
|
@ -8,7 +8,7 @@ AEMET-OpenData==0.4.5
|
||||
AIOAladdinConnect==0.1.58
|
||||
|
||||
# homeassistant.components.honeywell
|
||||
AIOSomecomfort==0.0.17
|
||||
AIOSomecomfort==0.0.22
|
||||
|
||||
# homeassistant.components.adax
|
||||
Adax-local==0.1.5
|
||||
@ -55,7 +55,7 @@ ProgettiHWSW==0.1.3
|
||||
# PyBluez==0.22
|
||||
|
||||
# homeassistant.components.cast
|
||||
PyChromecast==13.0.7
|
||||
PyChromecast==13.0.8
|
||||
|
||||
# homeassistant.components.flick_electric
|
||||
PyFlick==0.0.2
|
||||
@ -147,7 +147,7 @@ TwitterAPI==2.7.12
|
||||
WSDiscovery==2.0.0
|
||||
|
||||
# homeassistant.components.accuweather
|
||||
accuweather==2.0.0
|
||||
accuweather==2.1.0
|
||||
|
||||
# homeassistant.components.adax
|
||||
adax==0.3.0
|
||||
@ -216,7 +216,7 @@ aiobafi6==0.9.0
|
||||
aiobotocore==2.6.0
|
||||
|
||||
# homeassistant.components.comelit
|
||||
aiocomelit==0.3.0
|
||||
aiocomelit==0.5.2
|
||||
|
||||
# homeassistant.components.dhcp
|
||||
aiodiscover==1.5.1
|
||||
@ -524,7 +524,7 @@ beautifulsoup4==4.12.2
|
||||
bellows==0.36.8
|
||||
|
||||
# homeassistant.components.bmw_connected_drive
|
||||
bimmer-connected==0.14.2
|
||||
bimmer-connected==0.14.3
|
||||
|
||||
# homeassistant.components.bizkaibus
|
||||
bizkaibus==0.1.1
|
||||
@ -568,7 +568,7 @@ bluetooth-data-tools==1.14.0
|
||||
bond-async==0.2.1
|
||||
|
||||
# homeassistant.components.bosch_shc
|
||||
boschshcpy==0.2.57
|
||||
boschshcpy==0.2.75
|
||||
|
||||
# homeassistant.components.amazon_polly
|
||||
# homeassistant.components.route53
|
||||
@ -742,6 +742,9 @@ elkm1-lib==2.2.6
|
||||
# homeassistant.components.elmax
|
||||
elmax-api==0.0.4
|
||||
|
||||
# homeassistant.components.xmpp
|
||||
emoji==2.8.0
|
||||
|
||||
# homeassistant.components.emulated_roku
|
||||
emulated-roku==0.2.1
|
||||
|
||||
@ -854,7 +857,7 @@ gardena-bluetooth==1.4.0
|
||||
gassist-text==0.0.10
|
||||
|
||||
# homeassistant.components.google
|
||||
gcal-sync==5.0.0
|
||||
gcal-sync==6.0.1
|
||||
|
||||
# homeassistant.components.geniushub
|
||||
geniushub-client==0.7.1
|
||||
@ -1047,7 +1050,7 @@ ibmiotf==0.3.4
|
||||
|
||||
# homeassistant.components.local_calendar
|
||||
# homeassistant.components.local_todo
|
||||
ical==5.1.0
|
||||
ical==6.1.0
|
||||
|
||||
# homeassistant.components.ping
|
||||
icmplib==3.0
|
||||
@ -1185,7 +1188,7 @@ loqedAPI==2.1.8
|
||||
luftdaten==0.7.4
|
||||
|
||||
# homeassistant.components.lupusec
|
||||
lupupy==0.3.0
|
||||
lupupy==0.3.1
|
||||
|
||||
# homeassistant.components.lw12wifi
|
||||
lw12==0.9.2
|
||||
@ -1696,7 +1699,7 @@ pyedimax==0.2.1
|
||||
pyefergy==22.1.1
|
||||
|
||||
# homeassistant.components.enphase_envoy
|
||||
pyenphase==1.14.2
|
||||
pyenphase==1.14.3
|
||||
|
||||
# homeassistant.components.envisalink
|
||||
pyenvisalink==4.6
|
||||
@ -1935,7 +1938,7 @@ pyotgw==2.1.3
|
||||
pyotp==2.8.0
|
||||
|
||||
# homeassistant.components.overkiz
|
||||
pyoverkiz==1.12.1
|
||||
pyoverkiz==1.13.2
|
||||
|
||||
# homeassistant.components.openweathermap
|
||||
pyowm==3.2.0
|
||||
@ -1980,7 +1983,7 @@ pyqwikswitch==0.93
|
||||
pyrail==0.0.3
|
||||
|
||||
# homeassistant.components.rainbird
|
||||
pyrainbird==4.0.0
|
||||
pyrainbird==4.0.1
|
||||
|
||||
# homeassistant.components.recswitch
|
||||
pyrecswitch==1.0.2
|
||||
@ -2150,7 +2153,7 @@ python-kasa[speedups]==0.5.4
|
||||
# python-lirc==1.2.3
|
||||
|
||||
# homeassistant.components.matter
|
||||
python-matter-server==4.0.0
|
||||
python-matter-server==4.0.2
|
||||
|
||||
# homeassistant.components.xiaomi_miio
|
||||
python-miio==0.5.12
|
||||
@ -2187,7 +2190,7 @@ python-ripple-api==0.0.3
|
||||
python-roborock==0.35.0
|
||||
|
||||
# homeassistant.components.smarttub
|
||||
python-smarttub==0.0.35
|
||||
python-smarttub==0.0.36
|
||||
|
||||
# homeassistant.components.songpal
|
||||
python-songpal==0.15.2
|
||||
@ -2229,7 +2232,7 @@ pytrafikverket==0.3.7
|
||||
pyudev==0.23.2
|
||||
|
||||
# homeassistant.components.unifiprotect
|
||||
pyunifiprotect==4.20.0
|
||||
pyunifiprotect==4.21.0
|
||||
|
||||
# homeassistant.components.uptimerobot
|
||||
pyuptimerobot==22.2.0
|
||||
@ -2515,7 +2518,7 @@ streamlabswater==1.0.1
|
||||
stringcase==1.2.0
|
||||
|
||||
# homeassistant.components.subaru
|
||||
subarulink==0.7.8
|
||||
subarulink==0.7.9
|
||||
|
||||
# homeassistant.components.solarlog
|
||||
sunwatcher==0.2.1
|
||||
@ -2664,7 +2667,7 @@ vallox-websocket-api==3.3.0
|
||||
vehicle==2.0.0
|
||||
|
||||
# homeassistant.components.velbus
|
||||
velbus-aio==2023.10.2
|
||||
velbus-aio==2023.11.0
|
||||
|
||||
# homeassistant.components.venstar
|
||||
venstarcolortouch==0.19
|
||||
@ -2824,7 +2827,7 @@ zigpy==0.59.0
|
||||
zm-py==0.5.2
|
||||
|
||||
# homeassistant.components.zwave_js
|
||||
zwave-js-server-python==0.53.1
|
||||
zwave-js-server-python==0.54.0
|
||||
|
||||
# homeassistant.components.zwave_me
|
||||
zwave-me-ws==0.4.3
|
||||
|
@ -10,7 +10,7 @@ AEMET-OpenData==0.4.5
|
||||
AIOAladdinConnect==0.1.58
|
||||
|
||||
# homeassistant.components.honeywell
|
||||
AIOSomecomfort==0.0.17
|
||||
AIOSomecomfort==0.0.22
|
||||
|
||||
# homeassistant.components.adax
|
||||
Adax-local==0.1.5
|
||||
@ -48,7 +48,7 @@ PlexAPI==4.15.4
|
||||
ProgettiHWSW==0.1.3
|
||||
|
||||
# homeassistant.components.cast
|
||||
PyChromecast==13.0.7
|
||||
PyChromecast==13.0.8
|
||||
|
||||
# homeassistant.components.flick_electric
|
||||
PyFlick==0.0.2
|
||||
@ -128,7 +128,7 @@ Tami4EdgeAPI==2.1
|
||||
WSDiscovery==2.0.0
|
||||
|
||||
# homeassistant.components.accuweather
|
||||
accuweather==2.0.0
|
||||
accuweather==2.1.0
|
||||
|
||||
# homeassistant.components.adax
|
||||
adax==0.3.0
|
||||
@ -197,7 +197,7 @@ aiobafi6==0.9.0
|
||||
aiobotocore==2.6.0
|
||||
|
||||
# homeassistant.components.comelit
|
||||
aiocomelit==0.3.0
|
||||
aiocomelit==0.5.2
|
||||
|
||||
# homeassistant.components.dhcp
|
||||
aiodiscover==1.5.1
|
||||
@ -448,7 +448,7 @@ beautifulsoup4==4.12.2
|
||||
bellows==0.36.8
|
||||
|
||||
# homeassistant.components.bmw_connected_drive
|
||||
bimmer-connected==0.14.2
|
||||
bimmer-connected==0.14.3
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
bleak-retry-connector==3.3.0
|
||||
@ -482,7 +482,7 @@ bluetooth-data-tools==1.14.0
|
||||
bond-async==0.2.1
|
||||
|
||||
# homeassistant.components.bosch_shc
|
||||
boschshcpy==0.2.57
|
||||
boschshcpy==0.2.75
|
||||
|
||||
# homeassistant.components.broadlink
|
||||
broadlink==0.18.3
|
||||
@ -679,7 +679,7 @@ gardena-bluetooth==1.4.0
|
||||
gassist-text==0.0.10
|
||||
|
||||
# homeassistant.components.google
|
||||
gcal-sync==5.0.0
|
||||
gcal-sync==6.0.1
|
||||
|
||||
# homeassistant.components.geocaching
|
||||
geocachingapi==0.2.1
|
||||
@ -827,7 +827,7 @@ ibeacon-ble==1.0.1
|
||||
|
||||
# homeassistant.components.local_calendar
|
||||
# homeassistant.components.local_todo
|
||||
ical==5.1.0
|
||||
ical==6.1.0
|
||||
|
||||
# homeassistant.components.ping
|
||||
icmplib==3.0
|
||||
@ -1278,7 +1278,7 @@ pyeconet==0.1.22
|
||||
pyefergy==22.1.1
|
||||
|
||||
# homeassistant.components.enphase_envoy
|
||||
pyenphase==1.14.2
|
||||
pyenphase==1.14.3
|
||||
|
||||
# homeassistant.components.everlights
|
||||
pyeverlights==0.1.0
|
||||
@ -1460,7 +1460,7 @@ pyotgw==2.1.3
|
||||
pyotp==2.8.0
|
||||
|
||||
# homeassistant.components.overkiz
|
||||
pyoverkiz==1.12.1
|
||||
pyoverkiz==1.13.2
|
||||
|
||||
# homeassistant.components.openweathermap
|
||||
pyowm==3.2.0
|
||||
@ -1496,7 +1496,7 @@ pyps4-2ndscreen==1.3.1
|
||||
pyqwikswitch==0.93
|
||||
|
||||
# homeassistant.components.rainbird
|
||||
pyrainbird==4.0.0
|
||||
pyrainbird==4.0.1
|
||||
|
||||
# homeassistant.components.risco
|
||||
pyrisco==0.5.7
|
||||
@ -1603,7 +1603,7 @@ python-juicenet==1.1.0
|
||||
python-kasa[speedups]==0.5.4
|
||||
|
||||
# homeassistant.components.matter
|
||||
python-matter-server==4.0.0
|
||||
python-matter-server==4.0.2
|
||||
|
||||
# homeassistant.components.xiaomi_miio
|
||||
python-miio==0.5.12
|
||||
@ -1631,7 +1631,7 @@ python-qbittorrent==0.4.3
|
||||
python-roborock==0.35.0
|
||||
|
||||
# homeassistant.components.smarttub
|
||||
python-smarttub==0.0.35
|
||||
python-smarttub==0.0.36
|
||||
|
||||
# homeassistant.components.songpal
|
||||
python-songpal==0.15.2
|
||||
@ -1664,7 +1664,7 @@ pytrafikverket==0.3.7
|
||||
pyudev==0.23.2
|
||||
|
||||
# homeassistant.components.unifiprotect
|
||||
pyunifiprotect==4.20.0
|
||||
pyunifiprotect==4.21.0
|
||||
|
||||
# homeassistant.components.uptimerobot
|
||||
pyuptimerobot==22.2.0
|
||||
@ -1875,7 +1875,7 @@ stookwijzer==1.3.0
|
||||
stringcase==1.2.0
|
||||
|
||||
# homeassistant.components.subaru
|
||||
subarulink==0.7.8
|
||||
subarulink==0.7.9
|
||||
|
||||
# homeassistant.components.solarlog
|
||||
sunwatcher==0.2.1
|
||||
@ -1982,7 +1982,7 @@ vallox-websocket-api==3.3.0
|
||||
vehicle==2.0.0
|
||||
|
||||
# homeassistant.components.velbus
|
||||
velbus-aio==2023.10.2
|
||||
velbus-aio==2023.11.0
|
||||
|
||||
# homeassistant.components.venstar
|
||||
venstarcolortouch==0.19
|
||||
@ -2109,7 +2109,7 @@ zigpy-znp==0.11.6
|
||||
zigpy==0.59.0
|
||||
|
||||
# homeassistant.components.zwave_js
|
||||
zwave-js-server-python==0.53.1
|
||||
zwave-js-server-python==0.54.0
|
||||
|
||||
# homeassistant.components.zwave_me
|
||||
zwave-me-ws==0.4.3
|
||||
|
@ -183,7 +183,7 @@ async def test_api_increase_color_temp(
|
||||
("domain", "payload", "source_list", "idx"),
|
||||
[
|
||||
("media_player", "GAME CONSOLE", ["tv", "game console", 10000], 1),
|
||||
("media_player", "SATELLITE TV", ["satellite-tv", "game console"], 0),
|
||||
("media_player", "SATELLITE TV", ["satellite-tv", "game console", None], 0),
|
||||
("media_player", "SATELLITE TV", ["satellite_tv", "game console"], 0),
|
||||
("media_player", "BAD DEVICE", ["satellite_tv", "game console"], None),
|
||||
],
|
||||
@ -864,6 +864,7 @@ async def test_report_playback_state(hass: HomeAssistant) -> None:
|
||||
| MediaPlayerEntityFeature.PLAY
|
||||
| MediaPlayerEntityFeature.STOP,
|
||||
"volume_level": 0.75,
|
||||
"source_list": [None],
|
||||
},
|
||||
)
|
||||
|
||||
|
@ -1439,6 +1439,8 @@ async def test_media_player_inputs(hass: HomeAssistant) -> None:
|
||||
"aux",
|
||||
"input 1",
|
||||
"tv",
|
||||
0,
|
||||
None,
|
||||
],
|
||||
},
|
||||
)
|
||||
|
@ -7,6 +7,7 @@ from unittest.mock import patch
|
||||
import bleak
|
||||
from bleak.backends.device import BLEDevice
|
||||
from bleak.backends.scanner import AdvertisementData
|
||||
from bleak.exc import BleakError
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.bluetooth import (
|
||||
@ -366,3 +367,25 @@ async def test_we_switch_adapters_on_failure(
|
||||
assert await client.connect() is False
|
||||
cancel_hci0()
|
||||
cancel_hci1()
|
||||
|
||||
|
||||
async def test_raise_after_shutdown(
|
||||
hass: HomeAssistant,
|
||||
two_adapters: None,
|
||||
enable_bluetooth: None,
|
||||
install_bleak_catcher,
|
||||
mock_platform_client_that_raises_on_connect,
|
||||
) -> None:
|
||||
"""Ensure the slot gets released on connection exception."""
|
||||
manager = _get_manager()
|
||||
hci0_device_advs, cancel_hci0, cancel_hci1 = _generate_scanners_with_fake_devices(
|
||||
hass
|
||||
)
|
||||
# hci0 has 2 slots, hci1 has 1 slot
|
||||
with patch.object(manager, "shutdown", True):
|
||||
ble_device = hci0_device_advs["00:00:00:00:00:01"][0]
|
||||
client = bleak.BleakClient(ble_device)
|
||||
with pytest.raises(BleakError, match="shutdown"):
|
||||
await client.connect()
|
||||
cancel_hci0()
|
||||
cancel_hci1()
|
||||
|
@ -6,7 +6,12 @@ from homeassistant.components.coinbase.const import (
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN
|
||||
|
||||
from .const import GOOD_EXCHANGE_RATE, GOOD_EXCHANGE_RATE_2, MOCK_ACCOUNTS_RESPONSE
|
||||
from .const import (
|
||||
GOOD_CURRENCY_2,
|
||||
GOOD_EXCHANGE_RATE,
|
||||
GOOD_EXCHANGE_RATE_2,
|
||||
MOCK_ACCOUNTS_RESPONSE,
|
||||
)
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
@ -60,7 +65,11 @@ def mock_get_exchange_rates():
|
||||
"""Return a heavily reduced mock list of exchange rates for testing."""
|
||||
return {
|
||||
"currency": "USD",
|
||||
"rates": {GOOD_EXCHANGE_RATE_2: "0.109", GOOD_EXCHANGE_RATE: "0.00002"},
|
||||
"rates": {
|
||||
GOOD_CURRENCY_2: "1.0",
|
||||
GOOD_EXCHANGE_RATE_2: "0.109",
|
||||
GOOD_EXCHANGE_RATE: "0.00002",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
|
@ -12,26 +12,23 @@ BAD_EXCHANGE_RATE = "ETH"
|
||||
MOCK_ACCOUNTS_RESPONSE = [
|
||||
{
|
||||
"balance": {"amount": "0.00001", "currency": GOOD_CURRENCY},
|
||||
"currency": GOOD_CURRENCY,
|
||||
"currency": {"code": GOOD_CURRENCY},
|
||||
"id": "123456789",
|
||||
"name": "BTC Wallet",
|
||||
"native_balance": {"amount": "100.12", "currency": GOOD_CURRENCY_2},
|
||||
"type": "wallet",
|
||||
},
|
||||
{
|
||||
"balance": {"amount": "100.00", "currency": GOOD_CURRENCY},
|
||||
"currency": GOOD_CURRENCY,
|
||||
"currency": {"code": GOOD_CURRENCY},
|
||||
"id": "abcdefg",
|
||||
"name": "BTC Vault",
|
||||
"native_balance": {"amount": "100.12", "currency": GOOD_CURRENCY_2},
|
||||
"type": "vault",
|
||||
},
|
||||
{
|
||||
"balance": {"amount": "9.90", "currency": GOOD_CURRENCY_2},
|
||||
"currency": "USD",
|
||||
"currency": {"code": GOOD_CURRENCY_2},
|
||||
"id": "987654321",
|
||||
"name": "USD Wallet",
|
||||
"native_balance": {"amount": "9.90", "currency": GOOD_CURRENCY_2},
|
||||
"type": "fiat",
|
||||
},
|
||||
]
|
||||
|
@ -7,13 +7,11 @@
|
||||
'amount': '**REDACTED**',
|
||||
'currency': 'BTC',
|
||||
}),
|
||||
'currency': 'BTC',
|
||||
'currency': dict({
|
||||
'code': 'BTC',
|
||||
}),
|
||||
'id': '**REDACTED**',
|
||||
'name': 'BTC Wallet',
|
||||
'native_balance': dict({
|
||||
'amount': '**REDACTED**',
|
||||
'currency': 'USD',
|
||||
}),
|
||||
'type': 'wallet',
|
||||
}),
|
||||
dict({
|
||||
@ -21,13 +19,11 @@
|
||||
'amount': '**REDACTED**',
|
||||
'currency': 'BTC',
|
||||
}),
|
||||
'currency': 'BTC',
|
||||
'currency': dict({
|
||||
'code': 'BTC',
|
||||
}),
|
||||
'id': '**REDACTED**',
|
||||
'name': 'BTC Vault',
|
||||
'native_balance': dict({
|
||||
'amount': '**REDACTED**',
|
||||
'currency': 'USD',
|
||||
}),
|
||||
'type': 'vault',
|
||||
}),
|
||||
dict({
|
||||
@ -35,13 +31,11 @@
|
||||
'amount': '**REDACTED**',
|
||||
'currency': 'USD',
|
||||
}),
|
||||
'currency': 'USD',
|
||||
'currency': dict({
|
||||
'code': 'USD',
|
||||
}),
|
||||
'id': '**REDACTED**',
|
||||
'name': 'USD Wallet',
|
||||
'native_balance': dict({
|
||||
'amount': '**REDACTED**',
|
||||
'currency': 'USD',
|
||||
}),
|
||||
'type': 'fiat',
|
||||
}),
|
||||
]),
|
||||
|
@ -1694,3 +1694,62 @@ async def test_specificly_exposed_entities(
|
||||
result_json = await async_get_lights(client)
|
||||
|
||||
assert "1" in result_json
|
||||
|
||||
|
||||
async def test_get_light_state_when_none(hass_hue: HomeAssistant, hue_client) -> None:
|
||||
"""Test the getting of light state when brightness is None."""
|
||||
hass_hue.states.async_set(
|
||||
"light.ceiling_lights",
|
||||
STATE_ON,
|
||||
{
|
||||
light.ATTR_BRIGHTNESS: None,
|
||||
light.ATTR_RGB_COLOR: None,
|
||||
light.ATTR_HS_COLOR: None,
|
||||
light.ATTR_COLOR_TEMP: None,
|
||||
light.ATTR_XY_COLOR: None,
|
||||
light.ATTR_SUPPORTED_COLOR_MODES: [
|
||||
light.COLOR_MODE_COLOR_TEMP,
|
||||
light.COLOR_MODE_HS,
|
||||
light.COLOR_MODE_XY,
|
||||
],
|
||||
light.ATTR_COLOR_MODE: light.COLOR_MODE_XY,
|
||||
},
|
||||
)
|
||||
|
||||
light_json = await perform_get_light_state(
|
||||
hue_client, "light.ceiling_lights", HTTPStatus.OK
|
||||
)
|
||||
state = light_json["state"]
|
||||
assert state[HUE_API_STATE_ON] is True
|
||||
assert state[HUE_API_STATE_BRI] == 1
|
||||
assert state[HUE_API_STATE_HUE] == 0
|
||||
assert state[HUE_API_STATE_SAT] == 0
|
||||
assert state[HUE_API_STATE_CT] == 153
|
||||
|
||||
hass_hue.states.async_set(
|
||||
"light.ceiling_lights",
|
||||
STATE_OFF,
|
||||
{
|
||||
light.ATTR_BRIGHTNESS: None,
|
||||
light.ATTR_RGB_COLOR: None,
|
||||
light.ATTR_HS_COLOR: None,
|
||||
light.ATTR_COLOR_TEMP: None,
|
||||
light.ATTR_XY_COLOR: None,
|
||||
light.ATTR_SUPPORTED_COLOR_MODES: [
|
||||
light.COLOR_MODE_COLOR_TEMP,
|
||||
light.COLOR_MODE_HS,
|
||||
light.COLOR_MODE_XY,
|
||||
],
|
||||
light.ATTR_COLOR_MODE: light.COLOR_MODE_XY,
|
||||
},
|
||||
)
|
||||
|
||||
light_json = await perform_get_light_state(
|
||||
hue_client, "light.ceiling_lights", HTTPStatus.OK
|
||||
)
|
||||
state = light_json["state"]
|
||||
assert state[HUE_API_STATE_ON] is False
|
||||
assert state[HUE_API_STATE_BRI] == 1
|
||||
assert state[HUE_API_STATE_HUE] == 0
|
||||
assert state[HUE_API_STATE_SAT] == 0
|
||||
assert state[HUE_API_STATE_CT] == 153
|
||||
|
62
tests/components/esphome/bluetooth/test_client.py
Normal file
62
tests/components/esphome/bluetooth/test_client.py
Normal file
@ -0,0 +1,62 @@
|
||||
"""Tests for ESPHomeClient."""
|
||||
from __future__ import annotations
|
||||
|
||||
from aioesphomeapi import APIClient, APIVersion, BluetoothProxyFeature, DeviceInfo
|
||||
from bleak.exc import BleakError
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.bluetooth import HaBluetoothConnector
|
||||
from homeassistant.components.esphome.bluetooth.cache import ESPHomeBluetoothCache
|
||||
from homeassistant.components.esphome.bluetooth.client import (
|
||||
ESPHomeClient,
|
||||
ESPHomeClientData,
|
||||
)
|
||||
from homeassistant.components.esphome.bluetooth.device import ESPHomeBluetoothDevice
|
||||
from homeassistant.components.esphome.bluetooth.scanner import ESPHomeScanner
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from tests.components.bluetooth import generate_ble_device
|
||||
|
||||
ESP_MAC_ADDRESS = "AA:BB:CC:DD:EE:FF"
|
||||
ESP_NAME = "proxy"
|
||||
|
||||
|
||||
@pytest.fixture(name="client_data")
|
||||
async def client_data_fixture(
|
||||
hass: HomeAssistant, mock_client: APIClient
|
||||
) -> ESPHomeClientData:
|
||||
"""Return a client data fixture."""
|
||||
connector = HaBluetoothConnector(ESPHomeClientData, ESP_MAC_ADDRESS, lambda: True)
|
||||
return ESPHomeClientData(
|
||||
bluetooth_device=ESPHomeBluetoothDevice(ESP_NAME, ESP_MAC_ADDRESS),
|
||||
cache=ESPHomeBluetoothCache(),
|
||||
client=mock_client,
|
||||
device_info=DeviceInfo(
|
||||
mac_address=ESP_MAC_ADDRESS,
|
||||
name=ESP_NAME,
|
||||
bluetooth_proxy_feature_flags=BluetoothProxyFeature.PASSIVE_SCAN
|
||||
& BluetoothProxyFeature.ACTIVE_CONNECTIONS
|
||||
& BluetoothProxyFeature.REMOTE_CACHING
|
||||
& BluetoothProxyFeature.PAIRING
|
||||
& BluetoothProxyFeature.CACHE_CLEARING
|
||||
& BluetoothProxyFeature.RAW_ADVERTISEMENTS,
|
||||
),
|
||||
api_version=APIVersion(1, 9),
|
||||
title=ESP_NAME,
|
||||
scanner=ESPHomeScanner(
|
||||
hass, ESP_MAC_ADDRESS, ESP_NAME, lambda info: None, connector, True
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def test_client_usage_while_not_connected(client_data: ESPHomeClientData) -> None:
|
||||
"""Test client usage while not connected."""
|
||||
ble_device = generate_ble_device(
|
||||
"CC:BB:AA:DD:EE:FF", details={"source": ESP_MAC_ADDRESS, "address_type": 1}
|
||||
)
|
||||
|
||||
client = ESPHomeClient(ble_device, client_data=client_data)
|
||||
with pytest.raises(
|
||||
BleakError, match=f"{ESP_NAME}.*{ESP_MAC_ADDRESS}.*not connected"
|
||||
):
|
||||
await client.write_gatt_char("test", b"test") is False
|
@ -32,6 +32,15 @@ PROFILE_USER_ID = "fitbit-api-user-id-1"
|
||||
FAKE_ACCESS_TOKEN = "some-access-token"
|
||||
FAKE_REFRESH_TOKEN = "some-refresh-token"
|
||||
FAKE_AUTH_IMPL = "conftest-imported-cred"
|
||||
FULL_NAME = "First Last"
|
||||
DISPLAY_NAME = "First L."
|
||||
PROFILE_DATA = {
|
||||
"fullName": FULL_NAME,
|
||||
"displayName": DISPLAY_NAME,
|
||||
"displayNameSetting": "name",
|
||||
"firstName": "First",
|
||||
"lastName": "Last",
|
||||
}
|
||||
|
||||
PROFILE_API_URL = "https://api.fitbit.com/1/user/-/profile.json"
|
||||
DEVICES_API_URL = "https://api.fitbit.com/1/user/-/devices.json"
|
||||
@ -214,20 +223,34 @@ def mock_profile_locale() -> str:
|
||||
return "en_US"
|
||||
|
||||
|
||||
@pytest.fixture(name="profile_data")
|
||||
def mock_profile_data() -> dict[str, Any]:
|
||||
"""Fixture to return other profile data fields."""
|
||||
return PROFILE_DATA
|
||||
|
||||
|
||||
@pytest.fixture(name="profile_response")
|
||||
def mock_profile_response(
|
||||
profile_id: str, profile_locale: str, profile_data: dict[str, Any]
|
||||
) -> dict[str, Any]:
|
||||
"""Fixture to construct the fake profile API response."""
|
||||
return {
|
||||
"user": {
|
||||
"encodedId": profile_id,
|
||||
"locale": profile_locale,
|
||||
**profile_data,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture(name="profile", autouse=True)
|
||||
def mock_profile(requests_mock: Mocker, profile_id: str, profile_locale: str) -> None:
|
||||
def mock_profile(requests_mock: Mocker, profile_response: dict[str, Any]) -> None:
|
||||
"""Fixture to setup fake requests made to Fitbit API during config flow."""
|
||||
requests_mock.register_uri(
|
||||
"GET",
|
||||
PROFILE_API_URL,
|
||||
status_code=HTTPStatus.OK,
|
||||
json={
|
||||
"user": {
|
||||
"encodedId": profile_id,
|
||||
"fullName": "My name",
|
||||
"locale": profile_locale,
|
||||
},
|
||||
},
|
||||
json=profile_response,
|
||||
)
|
||||
|
||||
|
||||
|
@ -17,8 +17,10 @@ from homeassistant.helpers import config_entry_oauth2_flow, issue_registry as ir
|
||||
|
||||
from .conftest import (
|
||||
CLIENT_ID,
|
||||
DISPLAY_NAME,
|
||||
FAKE_AUTH_IMPL,
|
||||
PROFILE_API_URL,
|
||||
PROFILE_DATA,
|
||||
PROFILE_USER_ID,
|
||||
SERVER_ACCESS_TOKEN,
|
||||
)
|
||||
@ -76,7 +78,7 @@ async def test_full_flow(
|
||||
entries = hass.config_entries.async_entries(DOMAIN)
|
||||
assert len(entries) == 1
|
||||
config_entry = entries[0]
|
||||
assert config_entry.title == "My name"
|
||||
assert config_entry.title == DISPLAY_NAME
|
||||
assert config_entry.unique_id == PROFILE_USER_ID
|
||||
|
||||
data = dict(config_entry.data)
|
||||
@ -286,7 +288,7 @@ async def test_import_fitbit_config(
|
||||
|
||||
# Verify valid profile can be fetched from the API
|
||||
config_entry = entries[0]
|
||||
assert config_entry.title == "My name"
|
||||
assert config_entry.title == DISPLAY_NAME
|
||||
assert config_entry.unique_id == PROFILE_USER_ID
|
||||
|
||||
data = dict(config_entry.data)
|
||||
@ -598,3 +600,60 @@ async def test_reauth_wrong_user_id(
|
||||
assert result.get("reason") == "wrong_account"
|
||||
|
||||
assert len(mock_setup.mock_calls) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("profile_data", "expected_title"),
|
||||
[
|
||||
(PROFILE_DATA, DISPLAY_NAME),
|
||||
({"displayName": DISPLAY_NAME}, DISPLAY_NAME),
|
||||
],
|
||||
ids=("full_profile_data", "display_name_only"),
|
||||
)
|
||||
async def test_partial_profile_data(
|
||||
hass: HomeAssistant,
|
||||
hass_client_no_auth: ClientSessionGenerator,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
current_request_with_host: None,
|
||||
profile: None,
|
||||
setup_credentials: None,
|
||||
expected_title: str,
|
||||
) -> None:
|
||||
"""Check full flow."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
state = config_entry_oauth2_flow._encode_jwt(
|
||||
hass,
|
||||
{
|
||||
"flow_id": result["flow_id"],
|
||||
"redirect_uri": REDIRECT_URL,
|
||||
},
|
||||
)
|
||||
assert result["type"] == FlowResultType.EXTERNAL_STEP
|
||||
assert result["url"] == (
|
||||
f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
|
||||
f"&redirect_uri={REDIRECT_URL}"
|
||||
f"&state={state}"
|
||||
"&scope=activity+heartrate+nutrition+profile+settings+sleep+weight&prompt=consent"
|
||||
)
|
||||
|
||||
client = await hass_client_no_auth()
|
||||
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
|
||||
assert resp.status == 200
|
||||
|
||||
aioclient_mock.post(
|
||||
OAUTH2_TOKEN,
|
||||
json=SERVER_ACCESS_TOKEN,
|
||||
)
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.fitbit.async_setup_entry", return_value=True
|
||||
) as mock_setup:
|
||||
await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
|
||||
assert len(mock_setup.mock_calls) == 1
|
||||
entries = hass.config_entries.async_entries(DOMAIN)
|
||||
assert len(entries) == 1
|
||||
config_entry = entries[0]
|
||||
assert config_entry.title == expected_title
|
||||
|
@ -771,3 +771,60 @@ async def test_device_battery_level_reauth_required(
|
||||
flows = hass.config_entries.flow.async_progress()
|
||||
assert len(flows) == 1
|
||||
assert flows[0]["step_id"] == "reauth_confirm"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("scopes", "response_data", "expected_state"),
|
||||
[
|
||||
(["heartrate"], {}, "unknown"),
|
||||
(
|
||||
["heartrate"],
|
||||
{
|
||||
"restingHeartRate": 120,
|
||||
},
|
||||
"120",
|
||||
),
|
||||
(
|
||||
["heartrate"],
|
||||
{
|
||||
"restingHeartRate": 0,
|
||||
},
|
||||
"0",
|
||||
),
|
||||
],
|
||||
ids=("missing", "valid", "zero"),
|
||||
)
|
||||
async def test_resting_heart_rate_responses(
|
||||
hass: HomeAssistant,
|
||||
setup_credentials: None,
|
||||
integration_setup: Callable[[], Awaitable[bool]],
|
||||
register_timeseries: Callable[[str, dict[str, Any]], None],
|
||||
response_data: dict[str, Any],
|
||||
expected_state: str,
|
||||
) -> None:
|
||||
"""Test resting heart rate sensor with various values from response."""
|
||||
|
||||
register_timeseries(
|
||||
"activities/heart",
|
||||
timeseries_response(
|
||||
"activities-heart",
|
||||
{
|
||||
"customHeartRateZones": [],
|
||||
"heartRateZones": [
|
||||
{
|
||||
"caloriesOut": 0,
|
||||
"max": 220,
|
||||
"min": 159,
|
||||
"minutes": 0,
|
||||
"name": "Peak",
|
||||
},
|
||||
],
|
||||
**response_data,
|
||||
},
|
||||
),
|
||||
)
|
||||
assert await integration_setup()
|
||||
|
||||
state = hass.states.get("sensor.resting_heart_rate")
|
||||
assert state
|
||||
assert state.state == expected_state
|
||||
|
@ -1301,3 +1301,51 @@ async def test_event_differs_timezone(
|
||||
"description": event["description"],
|
||||
"supported_features": 3,
|
||||
}
|
||||
|
||||
|
||||
async def test_invalid_rrule_fix(
|
||||
hass: HomeAssistant,
|
||||
hass_client: ClientSessionGenerator,
|
||||
mock_events_list_items,
|
||||
component_setup,
|
||||
) -> None:
|
||||
"""Test that an invalid RRULE returned from Google Calendar API is handled correctly end to end."""
|
||||
week_from_today = dt_util.now().date() + datetime.timedelta(days=7)
|
||||
end_event = week_from_today + datetime.timedelta(days=1)
|
||||
event = {
|
||||
**TEST_EVENT,
|
||||
"start": {"date": week_from_today.isoformat()},
|
||||
"end": {"date": end_event.isoformat()},
|
||||
"recurrence": [
|
||||
"RRULE:DATE;TZID=Europe/Warsaw:20230818T020000,20230915T020000,20231013T020000,20231110T010000,20231208T010000",
|
||||
],
|
||||
}
|
||||
mock_events_list_items([event])
|
||||
|
||||
assert await component_setup()
|
||||
|
||||
state = hass.states.get(TEST_ENTITY)
|
||||
assert state.name == TEST_ENTITY_NAME
|
||||
assert state.state == STATE_OFF
|
||||
|
||||
# Pick a date range that contains two instances of the event
|
||||
web_client = await hass_client()
|
||||
response = await web_client.get(
|
||||
get_events_url(TEST_ENTITY, "2023-08-10T00:00:00Z", "2023-09-20T00:00:00Z")
|
||||
)
|
||||
assert response.status == HTTPStatus.OK
|
||||
events = await response.json()
|
||||
|
||||
# Both instances are returned, however the RDATE rule is ignored by Home
|
||||
# Assistant so they are just treateded as flattened events.
|
||||
assert len(events) == 2
|
||||
|
||||
event = events[0]
|
||||
assert event["uid"] == "cydrevtfuybguinhomj@google.com"
|
||||
assert event["recurrence_id"] == "_c8rinwq863h45qnucyoi43ny8_20230818"
|
||||
assert event["rrule"] is None
|
||||
|
||||
event = events[1]
|
||||
assert event["uid"] == "cydrevtfuybguinhomj@google.com"
|
||||
assert event["recurrence_id"] == "_c8rinwq863h45qnucyoi43ny8_20230915"
|
||||
assert event["rrule"] is None
|
||||
|
@ -378,3 +378,29 @@ async def test_failed_login_attempts_counter(
|
||||
resp = await client.get("/auth_true")
|
||||
assert resp.status == HTTPStatus.OK
|
||||
assert app[KEY_FAILED_LOGIN_ATTEMPTS][remote_ip] == 2
|
||||
|
||||
|
||||
async def test_single_ban_file_entry(
|
||||
hass: HomeAssistant,
|
||||
) -> None:
|
||||
"""Test that only one item is added to ban file."""
|
||||
app = web.Application()
|
||||
app["hass"] = hass
|
||||
|
||||
async def unauth_handler(request):
|
||||
"""Return a mock web response."""
|
||||
raise HTTPUnauthorized
|
||||
|
||||
app.router.add_get("/example", unauth_handler)
|
||||
setup_bans(hass, app, 2)
|
||||
mock_real_ip(app)("200.201.202.204")
|
||||
|
||||
manager: IpBanManager = app[KEY_BAN_MANAGER]
|
||||
m_open = mock_open()
|
||||
|
||||
with patch("homeassistant.components.http.ban.open", m_open, create=True):
|
||||
remote_ip = ip_address("200.201.202.204")
|
||||
await manager.async_add_ban(remote_ip)
|
||||
await manager.async_add_ban(remote_ip)
|
||||
|
||||
assert m_open.call_count == 1
|
||||
|
@ -18,16 +18,25 @@ TEST_MESSAGE_HEADERS1 = (
|
||||
b"for <notify@example.com>; Fri, 24 Mar 2023 13:52:01 +0100 (CET)\r\n"
|
||||
)
|
||||
TEST_MESSAGE_HEADERS2 = (
|
||||
b"MIME-Version: 1.0\r\n"
|
||||
b"To: notify@example.com\r\n"
|
||||
b"From: John Doe <john.doe@example.com>\r\n"
|
||||
b"Subject: Test subject\r\n"
|
||||
b"Message-ID: <N753P9hLvLw3lYGan11ji9WggPjxtLSpKvFOYgdnE@example.com>"
|
||||
b"Message-ID: <N753P9hLvLw3lYGan11ji9WggPjxtLSpKvFOYgdnE@example.com>\r\n"
|
||||
b"MIME-Version: 1.0\r\n"
|
||||
)
|
||||
|
||||
TEST_MULTIPART_HEADER = (
|
||||
b'Content-Type: multipart/related;\r\n\tboundary="Mark=_100584970350292485166"'
|
||||
)
|
||||
|
||||
TEST_MESSAGE_HEADERS3 = b""
|
||||
|
||||
TEST_MESSAGE = TEST_MESSAGE_HEADERS1 + DATE_HEADER1 + TEST_MESSAGE_HEADERS2
|
||||
|
||||
TEST_MESSAGE_MULTIPART = (
|
||||
TEST_MESSAGE_HEADERS1 + DATE_HEADER1 + TEST_MESSAGE_HEADERS2 + TEST_MULTIPART_HEADER
|
||||
)
|
||||
|
||||
TEST_MESSAGE_NO_SUBJECT_TO_FROM = (
|
||||
TEST_MESSAGE_HEADERS1 + DATE_HEADER1 + TEST_MESSAGE_HEADERS3
|
||||
)
|
||||
@ -44,21 +53,27 @@ TEST_INVALID_DATE3 = (
|
||||
|
||||
TEST_CONTENT_TEXT_BARE = b"\r\nTest body\r\n\r\n"
|
||||
|
||||
TEST_CONTENT_BINARY = (
|
||||
b"Content-Type: application/binary\r\n"
|
||||
b"Content-Transfer-Encoding: base64\r\n"
|
||||
b"\r\n"
|
||||
b"VGVzdCBib2R5\r\n"
|
||||
)
|
||||
TEST_CONTENT_BINARY = b"Content-Type: application/binary\r\n\r\nTest body\r\n"
|
||||
|
||||
TEST_CONTENT_TEXT_PLAIN = (
|
||||
b"Content-Type: text/plain; charset=UTF-8; format=flowed\r\n"
|
||||
b"Content-Transfer-Encoding: 7bit\r\n\r\nTest body\r\n\r\n"
|
||||
b'Content-Type: text/plain; charset="utf-8"\r\n'
|
||||
b"Content-Transfer-Encoding: 7bit\r\n\r\nTest body\r\n"
|
||||
)
|
||||
|
||||
TEST_CONTENT_TEXT_BASE64 = (
|
||||
b'Content-Type: text/plain; charset="utf-8"\r\n'
|
||||
b"Content-Transfer-Encoding: base64\r\n\r\nVGVzdCBib2R5\r\n"
|
||||
)
|
||||
|
||||
TEST_CONTENT_TEXT_BASE64_INVALID = (
|
||||
b'Content-Type: text/plain; charset="utf-8"\r\n'
|
||||
b"Content-Transfer-Encoding: base64\r\n\r\nVGVzdCBib2R5invalid\r\n"
|
||||
)
|
||||
TEST_BADLY_ENCODED_CONTENT = "VGVzdCBib2R5invalid\r\n"
|
||||
|
||||
TEST_CONTENT_TEXT_OTHER = (
|
||||
b"Content-Type: text/other; charset=UTF-8\r\n"
|
||||
b"Content-Transfer-Encoding: 7bit\r\n\r\nTest body\r\n\r\n"
|
||||
b"Content-Transfer-Encoding: 7bit\r\n\r\nTest body\r\n"
|
||||
)
|
||||
|
||||
TEST_CONTENT_HTML = (
|
||||
@ -76,14 +91,40 @@ TEST_CONTENT_HTML = (
|
||||
b"</html>\r\n"
|
||||
b"\r\n"
|
||||
)
|
||||
TEST_CONTENT_HTML_BASE64 = (
|
||||
b"Content-Type: text/html; charset=UTF-8\r\n"
|
||||
b"Content-Transfer-Encoding: base64\r\n\r\n"
|
||||
b"PGh0bWw+CiAgICA8aGVhZD48bWV0YSBodHRwLWVxdW"
|
||||
b"l2PSJjb250ZW50LXR5cGUiIGNvbnRlbnQ9InRleHQvaHRtbDsgY2hhcnNldD1VVEYtOCI+PC9oZWFkPgog"
|
||||
b"CAgPGJvZHk+CiAgICAgIDxwPlRlc3QgYm9keTxicj48L3A+CiAgICA8L2JvZHk+CjwvaHRtbD4=\r\n"
|
||||
)
|
||||
|
||||
|
||||
TEST_CONTENT_MULTIPART = (
|
||||
b"\r\nThis is a multi-part message in MIME format.\r\n"
|
||||
+ b"--------------McwBciN2C0o3rWeF1tmFo2oI\r\n"
|
||||
+ b"\r\n--Mark=_100584970350292485166\r\n"
|
||||
+ TEST_CONTENT_TEXT_PLAIN
|
||||
+ b"--------------McwBciN2C0o3rWeF1tmFo2oI\r\n"
|
||||
+ b"\r\n--Mark=_100584970350292485166\r\n"
|
||||
+ TEST_CONTENT_HTML
|
||||
+ b"--------------McwBciN2C0o3rWeF1tmFo2oI--\r\n"
|
||||
+ b"\r\n--Mark=_100584970350292485166--\r\n"
|
||||
)
|
||||
|
||||
TEST_CONTENT_MULTIPART_BASE64 = (
|
||||
b"\r\nThis is a multi-part message in MIME format.\r\n"
|
||||
+ b"\r\n--Mark=_100584970350292485166\r\n"
|
||||
+ TEST_CONTENT_TEXT_BASE64
|
||||
+ b"\r\n--Mark=_100584970350292485166\r\n"
|
||||
+ TEST_CONTENT_HTML_BASE64
|
||||
+ b"\r\n--Mark=_100584970350292485166--\r\n"
|
||||
)
|
||||
|
||||
TEST_CONTENT_MULTIPART_BASE64_INVALID = (
|
||||
b"\r\nThis is a multi-part message in MIME format.\r\n"
|
||||
+ b"\r\n--Mark=_100584970350292485166\r\n"
|
||||
+ TEST_CONTENT_TEXT_BASE64_INVALID
|
||||
+ b"\r\n--Mark=_100584970350292485166\r\n"
|
||||
+ TEST_CONTENT_HTML_BASE64
|
||||
+ b"\r\n--Mark=_100584970350292485166--\r\n"
|
||||
)
|
||||
|
||||
EMPTY_SEARCH_RESPONSE = ("OK", [b"", b"Search completed (0.0001 + 0.000 secs)."])
|
||||
@ -202,14 +243,40 @@ TEST_FETCH_RESPONSE_MULTIPART = (
|
||||
"OK",
|
||||
[
|
||||
b"1 FETCH (BODY[] {"
|
||||
+ str(len(TEST_MESSAGE + TEST_CONTENT_MULTIPART)).encode("utf-8")
|
||||
+ str(len(TEST_MESSAGE_MULTIPART + TEST_CONTENT_MULTIPART)).encode("utf-8")
|
||||
+ b"}",
|
||||
bytearray(TEST_MESSAGE + TEST_CONTENT_MULTIPART),
|
||||
bytearray(TEST_MESSAGE_MULTIPART + TEST_CONTENT_MULTIPART),
|
||||
b")",
|
||||
b"Fetch completed (0.0001 + 0.000 secs).",
|
||||
],
|
||||
)
|
||||
TEST_FETCH_RESPONSE_MULTIPART_BASE64 = (
|
||||
"OK",
|
||||
[
|
||||
b"1 FETCH (BODY[] {"
|
||||
+ str(len(TEST_MESSAGE_MULTIPART + TEST_CONTENT_MULTIPART_BASE64)).encode(
|
||||
"utf-8"
|
||||
)
|
||||
+ b"}",
|
||||
bytearray(TEST_MESSAGE_MULTIPART + TEST_CONTENT_MULTIPART_BASE64),
|
||||
b")",
|
||||
b"Fetch completed (0.0001 + 0.000 secs).",
|
||||
],
|
||||
)
|
||||
|
||||
TEST_FETCH_RESPONSE_MULTIPART_BASE64_INVALID = (
|
||||
"OK",
|
||||
[
|
||||
b"1 FETCH (BODY[] {"
|
||||
+ str(
|
||||
len(TEST_MESSAGE_MULTIPART + TEST_CONTENT_MULTIPART_BASE64_INVALID)
|
||||
).encode("utf-8")
|
||||
+ b"}",
|
||||
bytearray(TEST_MESSAGE_MULTIPART + TEST_CONTENT_MULTIPART_BASE64_INVALID),
|
||||
b")",
|
||||
b"Fetch completed (0.0001 + 0.000 secs).",
|
||||
],
|
||||
)
|
||||
|
||||
TEST_FETCH_RESPONSE_NO_SUBJECT_TO_FROM = (
|
||||
"OK",
|
||||
|
@ -17,12 +17,15 @@ from homeassistant.util.dt import utcnow
|
||||
from .const import (
|
||||
BAD_RESPONSE,
|
||||
EMPTY_SEARCH_RESPONSE,
|
||||
TEST_BADLY_ENCODED_CONTENT,
|
||||
TEST_FETCH_RESPONSE_BINARY,
|
||||
TEST_FETCH_RESPONSE_HTML,
|
||||
TEST_FETCH_RESPONSE_INVALID_DATE1,
|
||||
TEST_FETCH_RESPONSE_INVALID_DATE2,
|
||||
TEST_FETCH_RESPONSE_INVALID_DATE3,
|
||||
TEST_FETCH_RESPONSE_MULTIPART,
|
||||
TEST_FETCH_RESPONSE_MULTIPART_BASE64,
|
||||
TEST_FETCH_RESPONSE_MULTIPART_BASE64_INVALID,
|
||||
TEST_FETCH_RESPONSE_NO_SUBJECT_TO_FROM,
|
||||
TEST_FETCH_RESPONSE_TEXT_BARE,
|
||||
TEST_FETCH_RESPONSE_TEXT_OTHER,
|
||||
@ -110,6 +113,7 @@ async def test_entry_startup_fails(
|
||||
(TEST_FETCH_RESPONSE_TEXT_OTHER, True),
|
||||
(TEST_FETCH_RESPONSE_HTML, True),
|
||||
(TEST_FETCH_RESPONSE_MULTIPART, True),
|
||||
(TEST_FETCH_RESPONSE_MULTIPART_BASE64, True),
|
||||
(TEST_FETCH_RESPONSE_BINARY, True),
|
||||
],
|
||||
ids=[
|
||||
@ -122,6 +126,7 @@ async def test_entry_startup_fails(
|
||||
"other",
|
||||
"html",
|
||||
"multipart",
|
||||
"multipart_base64",
|
||||
"binary",
|
||||
],
|
||||
)
|
||||
@ -154,7 +159,7 @@ async def test_receiving_message_successfully(
|
||||
assert data["folder"] == "INBOX"
|
||||
assert data["sender"] == "john.doe@example.com"
|
||||
assert data["subject"] == "Test subject"
|
||||
assert data["text"]
|
||||
assert "Test body" in data["text"]
|
||||
assert (
|
||||
valid_date
|
||||
and isinstance(data["date"], datetime)
|
||||
@ -163,6 +168,48 @@ async def test_receiving_message_successfully(
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("imap_search", [TEST_SEARCH_RESPONSE])
|
||||
@pytest.mark.parametrize(
|
||||
("imap_fetch"),
|
||||
[
|
||||
TEST_FETCH_RESPONSE_MULTIPART_BASE64_INVALID,
|
||||
],
|
||||
ids=[
|
||||
"multipart_base64_invalid",
|
||||
],
|
||||
)
|
||||
@pytest.mark.parametrize("imap_has_capability", [True, False], ids=["push", "poll"])
|
||||
async def test_receiving_message_with_invalid_encoding(
|
||||
hass: HomeAssistant, mock_imap_protocol: MagicMock
|
||||
) -> None:
|
||||
"""Test receiving a message successfully."""
|
||||
event_called = async_capture_events(hass, "imap_content")
|
||||
|
||||
config_entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG)
|
||||
config_entry.add_to_hass(hass)
|
||||
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
# Make sure we have had one update (when polling)
|
||||
async_fire_time_changed(hass, utcnow() + timedelta(seconds=5))
|
||||
await hass.async_block_till_done()
|
||||
state = hass.states.get("sensor.imap_email_email_com")
|
||||
# we should have received one message
|
||||
assert state is not None
|
||||
assert state.state == "1"
|
||||
assert state.attributes["state_class"] == SensorStateClass.MEASUREMENT
|
||||
|
||||
# we should have received one event
|
||||
assert len(event_called) == 1
|
||||
data: dict[str, Any] = event_called[0].data
|
||||
assert data["server"] == "imap.server.com"
|
||||
assert data["username"] == "email@email.com"
|
||||
assert data["search"] == "UnSeen UnDeleted"
|
||||
assert data["folder"] == "INBOX"
|
||||
assert data["sender"] == "john.doe@example.com"
|
||||
assert data["subject"] == "Test subject"
|
||||
assert data["text"] == TEST_BADLY_ENCODED_CONTENT
|
||||
|
||||
|
||||
@pytest.mark.parametrize("imap_search", [TEST_SEARCH_RESPONSE])
|
||||
@pytest.mark.parametrize("imap_fetch", [TEST_FETCH_RESPONSE_NO_SUBJECT_TO_FROM])
|
||||
@pytest.mark.parametrize("imap_has_capability", [True, False], ids=["push", "poll"])
|
||||
@ -196,7 +243,7 @@ async def test_receiving_message_no_subject_to_from(
|
||||
assert data["date"] == datetime(
|
||||
2023, 3, 24, 13, 52, tzinfo=timezone(timedelta(seconds=3600))
|
||||
)
|
||||
assert data["text"] == "Test body\r\n\r\n"
|
||||
assert data["text"] == "Test body\r\n"
|
||||
assert data["headers"]["Return-Path"] == ("<john.doe@example.com>",)
|
||||
assert data["headers"]["Delivered-To"] == ("notify@example.com",)
|
||||
|
||||
|
@ -237,6 +237,54 @@ async def test_update_item(
|
||||
assert state.state == "0"
|
||||
|
||||
|
||||
async def test_rename(
|
||||
hass: HomeAssistant,
|
||||
setup_integration: None,
|
||||
ws_get_items: Callable[[], Awaitable[dict[str, str]]],
|
||||
) -> None:
|
||||
"""Test renaming a todo item."""
|
||||
|
||||
# Create new item
|
||||
await hass.services.async_call(
|
||||
TODO_DOMAIN,
|
||||
"add_item",
|
||||
{"item": "soda"},
|
||||
target={"entity_id": TEST_ENTITY},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
# Fetch item
|
||||
items = await ws_get_items()
|
||||
assert len(items) == 1
|
||||
item = items[0]
|
||||
assert item["summary"] == "soda"
|
||||
assert item["status"] == "needs_action"
|
||||
|
||||
state = hass.states.get(TEST_ENTITY)
|
||||
assert state
|
||||
assert state.state == "1"
|
||||
|
||||
# Rename item
|
||||
await hass.services.async_call(
|
||||
TODO_DOMAIN,
|
||||
"update_item",
|
||||
{"item": item["uid"], "rename": "water"},
|
||||
target={"entity_id": TEST_ENTITY},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
# Verify item has been renamed
|
||||
items = await ws_get_items()
|
||||
assert len(items) == 1
|
||||
item = items[0]
|
||||
assert item["summary"] == "water"
|
||||
assert item["status"] == "needs_action"
|
||||
|
||||
state = hass.states.get(TEST_ENTITY)
|
||||
assert state
|
||||
assert state.state == "1"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("src_idx", "dst_idx", "expected_items"),
|
||||
[
|
||||
|
@ -14,6 +14,7 @@
|
||||
"node_id": 5,
|
||||
"date_commissioned": "2023-01-16T21:07:57.508440",
|
||||
"last_interview": "2023-01-16T21:07:57.508448",
|
||||
"last_subscription_attempt": 0,
|
||||
"interview_version": 2,
|
||||
"attributes": {
|
||||
"0/4/0": 128,
|
||||
|
@ -3,6 +3,7 @@
|
||||
"date_commissioned": "2023-01-16T21:07:57.508440",
|
||||
"last_interview": "2023-01-16T21:07:57.508448",
|
||||
"interview_version": 2,
|
||||
"last_subscription_attempt": 0,
|
||||
"attributes": {
|
||||
"0/4/0": 128,
|
||||
"0/4/65532": 1,
|
||||
|
119
tests/components/matter/fixtures/nodes/switch-unit.json
Normal file
119
tests/components/matter/fixtures/nodes/switch-unit.json
Normal file
@ -0,0 +1,119 @@
|
||||
{
|
||||
"node_id": 1,
|
||||
"date_commissioned": "2022-11-29T21:23:48.485051",
|
||||
"last_interview": "2022-11-29T21:23:48.485057",
|
||||
"interview_version": 2,
|
||||
"attributes": {
|
||||
"0/29/0": [
|
||||
{
|
||||
"deviceType": 99999,
|
||||
"revision": 1
|
||||
}
|
||||
],
|
||||
"0/29/1": [
|
||||
4, 29, 31, 40, 42, 43, 44, 48, 49, 50, 51, 52, 53, 54, 55, 59, 60, 62, 63,
|
||||
64, 65
|
||||
],
|
||||
"0/29/2": [41],
|
||||
"0/29/3": [1],
|
||||
"0/29/65532": 0,
|
||||
"0/29/65533": 1,
|
||||
"0/29/65528": [],
|
||||
"0/29/65529": [],
|
||||
"0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
|
||||
"0/40/0": 1,
|
||||
"0/40/1": "Nabu Casa",
|
||||
"0/40/2": 65521,
|
||||
"0/40/3": "Mock SwitchUnit",
|
||||
"0/40/4": 32768,
|
||||
"0/40/5": "Mock SwitchUnit",
|
||||
"0/40/6": "XX",
|
||||
"0/40/7": 0,
|
||||
"0/40/8": "v1.0",
|
||||
"0/40/9": 1,
|
||||
"0/40/10": "v1.0",
|
||||
"0/40/11": "20221206",
|
||||
"0/40/12": "",
|
||||
"0/40/13": "",
|
||||
"0/40/14": "",
|
||||
"0/40/15": "TEST_SN",
|
||||
"0/40/16": false,
|
||||
"0/40/17": true,
|
||||
"0/40/18": "mock-switch-unit",
|
||||
"0/40/19": {
|
||||
"caseSessionsPerFabric": 3,
|
||||
"subscriptionsPerFabric": 3
|
||||
},
|
||||
"0/40/65532": 0,
|
||||
"0/40/65533": 1,
|
||||
"0/40/65528": [],
|
||||
"0/40/65529": [],
|
||||
"0/40/65531": [
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
|
||||
65528, 65529, 65531, 65532, 65533
|
||||
],
|
||||
"1/3/0": 0,
|
||||
"1/3/1": 2,
|
||||
"1/3/65532": 0,
|
||||
"1/3/65533": 4,
|
||||
"1/3/65528": [],
|
||||
"1/3/65529": [0, 64],
|
||||
"1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
|
||||
"1/4/0": 128,
|
||||
"1/4/65532": 1,
|
||||
"1/4/65533": 4,
|
||||
"1/4/65528": [0, 1, 2, 3],
|
||||
"1/4/65529": [0, 1, 2, 3, 4, 5],
|
||||
"1/4/65531": [0, 65528, 65529, 65531, 65532, 65533],
|
||||
"1/5/0": 0,
|
||||
"1/5/1": 0,
|
||||
"1/5/2": 0,
|
||||
"1/5/3": false,
|
||||
"1/5/4": 0,
|
||||
"1/5/65532": 0,
|
||||
"1/5/65533": 4,
|
||||
"1/5/65528": [0, 1, 2, 3, 4, 6],
|
||||
"1/5/65529": [0, 1, 2, 3, 4, 5, 6],
|
||||
"1/5/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
|
||||
"1/6/0": false,
|
||||
"1/6/16384": true,
|
||||
"1/6/16385": 0,
|
||||
"1/6/16386": 0,
|
||||
"1/6/16387": null,
|
||||
"1/6/65532": 1,
|
||||
"1/6/65533": 4,
|
||||
"1/6/65528": [],
|
||||
"1/6/65529": [0, 1, 2, 64, 65, 66],
|
||||
"1/6/65531": [
|
||||
0, 16384, 16385, 16386, 16387, 65528, 65529, 65531, 65532, 65533
|
||||
],
|
||||
"1/7/0": 0,
|
||||
"1/7/16": 0,
|
||||
"1/7/65532": 0,
|
||||
"1/7/65533": 1,
|
||||
"1/7/65528": [],
|
||||
"1/7/65529": [],
|
||||
"1/7/65531": [0, 16, 65528, 65529, 65531, 65532, 65533],
|
||||
"1/29/0": [
|
||||
{
|
||||
"deviceType": 9999999,
|
||||
"revision": 1
|
||||
}
|
||||
],
|
||||
"1/29/1": [
|
||||
3, 4, 5, 6, 7, 8, 15, 29, 30, 37, 47, 59, 64, 65, 69, 80, 257, 258, 259,
|
||||
512, 513, 514, 516, 768, 1024, 1026, 1027, 1028, 1029, 1030, 1283, 1284,
|
||||
1285, 1286, 1287, 1288, 1289, 1290, 1291, 1292, 1293, 1294, 2820,
|
||||
4294048773
|
||||
],
|
||||
"1/29/2": [],
|
||||
"1/29/3": [],
|
||||
"1/29/65532": 0,
|
||||
"1/29/65533": 1,
|
||||
"1/29/65528": [],
|
||||
"1/29/65529": [],
|
||||
"1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533]
|
||||
},
|
||||
"available": true,
|
||||
"attribute_subscriptions": []
|
||||
}
|
@ -14,22 +14,30 @@ from .common import (
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(name="switch_node")
|
||||
async def switch_node_fixture(
|
||||
@pytest.fixture(name="powerplug_node")
|
||||
async def powerplug_node_fixture(
|
||||
hass: HomeAssistant, matter_client: MagicMock
|
||||
) -> MatterNode:
|
||||
"""Fixture for a switch node."""
|
||||
"""Fixture for a Powerplug node."""
|
||||
return await setup_integration_with_node_fixture(
|
||||
hass, "on-off-plugin-unit", matter_client
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(name="switch_unit")
|
||||
async def switch_unit_fixture(
|
||||
hass: HomeAssistant, matter_client: MagicMock
|
||||
) -> MatterNode:
|
||||
"""Fixture for a Switch Unit node."""
|
||||
return await setup_integration_with_node_fixture(hass, "switch-unit", matter_client)
|
||||
|
||||
|
||||
# This tests needs to be adjusted to remove lingering tasks
|
||||
@pytest.mark.parametrize("expected_lingering_tasks", [True])
|
||||
async def test_turn_on(
|
||||
hass: HomeAssistant,
|
||||
matter_client: MagicMock,
|
||||
switch_node: MatterNode,
|
||||
powerplug_node: MatterNode,
|
||||
) -> None:
|
||||
"""Test turning on a switch."""
|
||||
state = hass.states.get("switch.mock_onoffpluginunit_powerplug_switch")
|
||||
@ -47,12 +55,12 @@ async def test_turn_on(
|
||||
|
||||
assert matter_client.send_device_command.call_count == 1
|
||||
assert matter_client.send_device_command.call_args == call(
|
||||
node_id=switch_node.node_id,
|
||||
node_id=powerplug_node.node_id,
|
||||
endpoint_id=1,
|
||||
command=clusters.OnOff.Commands.On(),
|
||||
)
|
||||
|
||||
set_node_attribute(switch_node, 1, 6, 0, True)
|
||||
set_node_attribute(powerplug_node, 1, 6, 0, True)
|
||||
await trigger_subscription_callback(hass, matter_client)
|
||||
|
||||
state = hass.states.get("switch.mock_onoffpluginunit_powerplug_switch")
|
||||
@ -65,7 +73,7 @@ async def test_turn_on(
|
||||
async def test_turn_off(
|
||||
hass: HomeAssistant,
|
||||
matter_client: MagicMock,
|
||||
switch_node: MatterNode,
|
||||
powerplug_node: MatterNode,
|
||||
) -> None:
|
||||
"""Test turning off a switch."""
|
||||
state = hass.states.get("switch.mock_onoffpluginunit_powerplug_switch")
|
||||
@ -83,7 +91,24 @@ async def test_turn_off(
|
||||
|
||||
assert matter_client.send_device_command.call_count == 1
|
||||
assert matter_client.send_device_command.call_args == call(
|
||||
node_id=switch_node.node_id,
|
||||
node_id=powerplug_node.node_id,
|
||||
endpoint_id=1,
|
||||
command=clusters.OnOff.Commands.Off(),
|
||||
)
|
||||
|
||||
|
||||
# This tests needs to be adjusted to remove lingering tasks
|
||||
@pytest.mark.parametrize("expected_lingering_tasks", [True])
|
||||
async def test_switch_unit(
|
||||
hass: HomeAssistant,
|
||||
matter_client: MagicMock,
|
||||
switch_unit: MatterNode,
|
||||
) -> None:
|
||||
"""Test if a switch entity is discovered from any (non-light) OnOf cluster device."""
|
||||
# A switch entity should be discovered as fallback for ANY Matter device (endpoint)
|
||||
# that has the OnOff cluster and does not fall into an explicit discovery schema
|
||||
# by another platform (e.g. light, lock etc.).
|
||||
state = hass.states.get("switch.mock_switchunit")
|
||||
assert state
|
||||
assert state.state == "off"
|
||||
assert state.attributes["friendly_name"] == "Mock SwitchUnit"
|
||||
|
@ -1785,6 +1785,24 @@ async def test_brightness_scale(
|
||||
assert state.state == STATE_ON
|
||||
assert state.attributes.get("brightness") == 255
|
||||
|
||||
# Turn on the light with half brightness
|
||||
async_fire_mqtt_message(
|
||||
hass, "test_light_bright_scale", '{"state":"ON", "brightness": 50}'
|
||||
)
|
||||
|
||||
state = hass.states.get("light.test")
|
||||
assert state.state == STATE_ON
|
||||
assert state.attributes.get("brightness") == 128
|
||||
|
||||
# Test limmiting max brightness
|
||||
async_fire_mqtt_message(
|
||||
hass, "test_light_bright_scale", '{"state":"ON", "brightness": 103}'
|
||||
)
|
||||
|
||||
state = hass.states.get("light.test")
|
||||
assert state.state == STATE_ON
|
||||
assert state.attributes.get("brightness") == 255
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"hass_config",
|
||||
|
tests/components/netatmo/test_api.py (new file, +22 lines)
@@ -0,0 +1,22 @@
"""The tests for the Netatmo api."""

from pyatmo.const import ALL_SCOPES

from homeassistant.components import cloud
from homeassistant.components.netatmo import api
from homeassistant.components.netatmo.const import API_SCOPES_EXCLUDED_FROM_CLOUD


async def test_get_api_scopes_cloud() -> None:
"""Test method to get API scopes when using cloud auth implementation."""
result = api.get_api_scopes(cloud.DOMAIN)

for scope in API_SCOPES_EXCLUDED_FROM_CLOUD:
assert scope not in result


async def test_get_api_scopes_other() -> None:
"""Test method to get API scopes when using cloud auth implementation."""
result = api.get_api_scopes("netatmo_239846i2f0j2")

assert sorted(ALL_SCOPES) == result

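(Aside: a hedged sketch of the contract the two tests above pin down; the real api.get_api_scopes may be implemented differently:)

def expected_scopes(auth_implementation: str) -> list[str]:
    """Cloud auth drops the cloud-excluded scopes; any other auth implementation gets every scope."""
    if auth_implementation == cloud.DOMAIN:
        return sorted(set(ALL_SCOPES) - set(API_SCOPES_EXCLUDED_FROM_CLOUD))
    return sorted(ALL_SCOPES)
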
@@ -466,6 +466,12 @@ async def test_light(client, light_entities) -> None:
'friendly_name="PC"} 70.58823529411765' in body
)

assert (
'light_brightness_percent{domain="light",'
'entity="light.hallway",'
'friendly_name="Hallway"} 100.0' in body
)


@pytest.mark.parametrize("namespace", [""])
async def test_lock(client, lock_entities) -> None:
@@ -1492,6 +1498,19 @@ async def light_fixture(
data["light_4"] = light_4
data["light_4_attributes"] = light_4_attributes

light_5 = entity_registry.async_get_or_create(
domain=light.DOMAIN,
platform="test",
unique_id="light_5",
suggested_object_id="hallway",
original_name="Hallway",
)
# Light is on, but brightness is unset; expect metrics to report
# brightness of 100%.
light_5_attributes = {light.ATTR_BRIGHTNESS: None}
set_state_with_entry(hass, light_5, STATE_ON, light_5_attributes)
data["light_5"] = light_5
data["light_5_attributes"] = light_5_attributes
await hass.async_block_till_done()
return data

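(Aside: the new hallway assertion and light_5 fixture exercise how light_brightness_percent is derived. A sketch of the relationship the test checks, not the exporter's code; the PC light's brightness of 180 is inferred from the 70.588...% value above:)

def brightness_percent(brightness: int | None) -> float:
    """A light that is on but reports no brightness counts as fully bright (100%)."""
    return 100.0 if brightness is None else brightness * 100.0 / 255.0

assert brightness_percent(None) == 100.0                # light_5 / "Hallway" above
assert round(brightness_percent(180), 6) == 70.588235   # the "PC" light above
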
@@ -2,8 +2,8 @@

from __future__ import annotations

from collections.abc import Awaitable, Callable, Generator
from http import HTTPStatus
import json
from typing import Any
from unittest.mock import patch

@@ -17,17 +17,16 @@ from homeassistant.components.rainbird.const import (
)
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component

from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker, AiohttpClientMockResponse

ComponentSetup = Callable[[], Awaitable[bool]]

HOST = "example.com"
URL = "http://example.com/stick"
PASSWORD = "password"
SERIAL_NUMBER = 0x12635436566
MAC_ADDRESS = "4C:A1:61:00:11:22"
MAC_ADDRESS_UNIQUE_ID = "4c:a1:61:00:11:22"

#
# Response payloads below come from pyrainbird test cases.
@@ -54,6 +53,20 @@ RAIN_DELAY = "B60010" # 0x10 is 16
RAIN_DELAY_OFF = "B60000"
# ACK command 0x10, Echo 0x06
ACK_ECHO = "0106"
WIFI_PARAMS_RESPONSE = {
"macAddress": MAC_ADDRESS,
"localIpAddress": "1.1.1.38",
"localNetmask": "255.255.255.0",
"localGateway": "1.1.1.1",
"rssi": -61,
"wifiSsid": "wifi-ssid-name",
"wifiPassword": "wifi-password-name",
"wifiSecurity": "wpa2-aes",
"apTimeoutNoLan": 20,
"apTimeoutIdle": 20,
"apSecurity": "unknown",
"stickVersion": "Rain Bird Stick Rev C/1.63",
}


CONFIG = {
@@ -66,10 +79,16 @@ CONFIG = {
}
}

CONFIG_ENTRY_DATA_OLD_FORMAT = {
"host": HOST,
"password": PASSWORD,
"serial_number": SERIAL_NUMBER,
}
CONFIG_ENTRY_DATA = {
"host": HOST,
"password": PASSWORD,
"serial_number": SERIAL_NUMBER,
"mac": MAC_ADDRESS,
}


@@ -80,21 +99,24 @@ def platforms() -> list[Platform]:


@pytest.fixture
def yaml_config() -> dict[str, Any]:
"""Fixture for configuration.yaml."""
return {}
async def config_entry_unique_id() -> str:
"""Fixture for config entry unique id."""
return MAC_ADDRESS_UNIQUE_ID


@pytest.fixture
async def config_entry_unique_id() -> str:
"""Fixture for serial number used in the config entry."""
async def serial_number() -> int:
"""Fixture for serial number used in the config entry data."""
return SERIAL_NUMBER


@pytest.fixture
async def config_entry_data() -> dict[str, Any]:
async def config_entry_data(serial_number: int) -> dict[str, Any]:
"""Fixture for MockConfigEntry data."""
return CONFIG_ENTRY_DATA
return {
**CONFIG_ENTRY_DATA,
"serial_number": serial_number,
}


@pytest.fixture
@@ -122,35 +144,35 @@ async def add_config_entry(
config_entry.add_to_hass(hass)


@pytest.fixture
async def setup_integration(
@pytest.fixture(autouse=True)
def setup_platforms(
hass: HomeAssistant,
platforms: list[str],
yaml_config: dict[str, Any],
) -> Generator[ComponentSetup, None, None]:
"""Fixture for setting up the component."""
) -> None:
"""Fixture for setting up the default platforms."""

with patch(f"homeassistant.components.{DOMAIN}.PLATFORMS", platforms):

async def func() -> bool:
result = await async_setup_component(hass, DOMAIN, yaml_config)
await hass.async_block_till_done()
return result

yield func
yield


def rainbird_response(data: str) -> bytes:
def rainbird_json_response(result: dict[str, str]) -> bytes:
"""Create a fake API response."""
return encryption.encrypt(
'{"jsonrpc": "2.0", "result": {"data":"%s"}, "id": 1} ' % data,
'{"jsonrpc": "2.0", "result": %s, "id": 1} ' % json.dumps(result),
PASSWORD,
)


def mock_json_response(result: dict[str, str]) -> AiohttpClientMockResponse:
"""Create a fake AiohttpClientMockResponse."""
return AiohttpClientMockResponse(
"POST", URL, response=rainbird_json_response(result)
)


def mock_response(data: str) -> AiohttpClientMockResponse:
"""Create a fake AiohttpClientMockResponse."""
return AiohttpClientMockResponse("POST", URL, response=rainbird_response(data))
return mock_json_response({"data": data})


def mock_response_error(

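(Aside: a short usage sketch of how the reworked helpers above combine in the rainbird tests; the exact response list is illustrative and reuses constants defined in this conftest:)

responses = [
    mock_response(SERIAL_RESPONSE),             # plain {"data": ...} payload, wrapped and encrypted
    mock_json_response(WIFI_PARAMS_RESPONSE),   # arbitrary JSON-RPC result, e.g. wifi params carrying the MAC
    mock_response_error(HTTPStatus.SERVICE_UNAVAILABLE),  # simulate a device error
]
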
@@ -1,14 +1,23 @@
"""Tests for rainbird sensor platform."""


from http import HTTPStatus

import pytest

from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .conftest import RAIN_SENSOR_OFF, RAIN_SENSOR_ON, SERIAL_NUMBER, ComponentSetup
from .conftest import (
CONFIG_ENTRY_DATA_OLD_FORMAT,
RAIN_SENSOR_OFF,
RAIN_SENSOR_ON,
mock_response_error,
)

from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMockResponse


@@ -18,21 +27,27 @@ def platforms() -> list[Platform]:
return [Platform.BINARY_SENSOR]


@pytest.fixture(autouse=True)
async def setup_config_entry(
hass: HomeAssistant, config_entry: MockConfigEntry
) -> list[Platform]:
"""Fixture to setup the config entry."""
await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED


@pytest.mark.parametrize(
("rain_response", "expected_state"),
[(RAIN_SENSOR_OFF, "off"), (RAIN_SENSOR_ON, "on")],
)
async def test_rainsensor(
hass: HomeAssistant,
setup_integration: ComponentSetup,
responses: list[AiohttpClientMockResponse],
entity_registry: er.EntityRegistry,
expected_state: bool,
) -> None:
"""Test rainsensor binary sensor."""

assert await setup_integration()

rainsensor = hass.states.get("binary_sensor.rain_bird_controller_rainsensor")
assert rainsensor is not None
assert rainsensor.state == expected_state
@@ -43,53 +58,24 @@ async def test_rainsensor(


@pytest.mark.parametrize(
("config_entry_unique_id", "entity_unique_id"),
("config_entry_data", "config_entry_unique_id", "setup_config_entry"),
[
(SERIAL_NUMBER, "1263613994342-rainsensor"),
# Some existing config entries may have a "0" serial number but preserve
# their unique id
(0, "0-rainsensor"),
],
)
async def test_unique_id(
hass: HomeAssistant,
setup_integration: ComponentSetup,
entity_registry: er.EntityRegistry,
entity_unique_id: str,
) -> None:
"""Test rainsensor binary sensor."""

assert await setup_integration()

rainsensor = hass.states.get("binary_sensor.rain_bird_controller_rainsensor")
assert rainsensor is not None
assert rainsensor.attributes == {
"friendly_name": "Rain Bird Controller Rainsensor",
"icon": "mdi:water",
}

entity_entry = entity_registry.async_get(
"binary_sensor.rain_bird_controller_rainsensor"
)
assert entity_entry
assert entity_entry.unique_id == entity_unique_id


@pytest.mark.parametrize(
("config_entry_unique_id"),
[
(None),
(CONFIG_ENTRY_DATA_OLD_FORMAT, None, None),
],
)
async def test_no_unique_id(
hass: HomeAssistant,
setup_integration: ComponentSetup,
responses: list[AiohttpClientMockResponse],
entity_registry: er.EntityRegistry,
config_entry: MockConfigEntry,
) -> None:
"""Test rainsensor binary sensor with no unique id."""

assert await setup_integration()
# Failure to migrate config entry to a unique id
responses.insert(0, mock_response_error(HTTPStatus.SERVICE_UNAVAILABLE))

await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED

rainsensor = hass.states.get("binary_sensor.rain_bird_controller_rainsensor")
assert rainsensor is not None

@@ -12,12 +12,14 @@ from aiohttp import ClientSession
from freezegun.api import FrozenDateTimeFactory
import pytest

from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .conftest import ComponentSetup, mock_response, mock_response_error
from .conftest import CONFIG_ENTRY_DATA_OLD_FORMAT, mock_response, mock_response_error

from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMockResponse

TEST_ENTITY = "calendar.rain_bird_controller"
@@ -80,6 +82,15 @@ def platforms() -> list[str]:
return [Platform.CALENDAR]


@pytest.fixture(autouse=True)
async def setup_config_entry(
hass: HomeAssistant, config_entry: MockConfigEntry
) -> list[Platform]:
"""Fixture to setup the config entry."""
await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED


@pytest.fixture(autouse=True)
def set_time_zone(hass: HomeAssistant):
"""Set the time zone for the tests."""
@@ -121,13 +132,9 @@ def get_events_fixture(


@pytest.mark.freeze_time("2023-01-21 09:32:00")
async def test_get_events(
hass: HomeAssistant, setup_integration: ComponentSetup, get_events: GetEventsFn
) -> None:
async def test_get_events(hass: HomeAssistant, get_events: GetEventsFn) -> None:
"""Test calendar event fetching APIs."""

assert await setup_integration()

events = await get_events("2023-01-20T00:00:00Z", "2023-02-05T00:00:00Z")
assert events == [
# Monday
@@ -158,31 +165,34 @@ async def test_get_events(


@pytest.mark.parametrize(
("freeze_time", "expected_state"),
("freeze_time", "expected_state", "setup_config_entry"),
[
(
datetime.datetime(2023, 1, 23, 3, 50, tzinfo=ZoneInfo("America/Regina")),
"off",
None,
),
(
datetime.datetime(2023, 1, 23, 4, 30, tzinfo=ZoneInfo("America/Regina")),
"on",
None,
),
],
)
async def test_event_state(
hass: HomeAssistant,
setup_integration: ComponentSetup,
get_events: GetEventsFn,
freezer: FrozenDateTimeFactory,
freeze_time: datetime.datetime,
expected_state: str,
entity_registry: er.EntityRegistry,
config_entry: MockConfigEntry,
) -> None:
"""Test calendar upcoming event state."""
freezer.move_to(freeze_time)

assert await setup_integration()
await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED

state = hass.states.get(TEST_ENTITY)
assert state is not None
@@ -200,7 +210,7 @@ async def test_event_state(

entity = entity_registry.async_get(TEST_ENTITY)
assert entity
assert entity.unique_id == 1263613994342
assert entity.unique_id == "4c:a1:61:00:11:22"


@pytest.mark.parametrize(
@@ -213,13 +223,10 @@ async def test_event_state(
)
async def test_calendar_not_supported_by_device(
hass: HomeAssistant,
setup_integration: ComponentSetup,
has_entity: bool,
) -> None:
"""Test calendar upcoming event state."""

assert await setup_integration()

state = hass.states.get(TEST_ENTITY)
assert (state is not None) == has_entity

@@ -229,7 +236,6 @@ async def test_calendar_not_supported_by_device(
)
async def test_no_schedule(
hass: HomeAssistant,
setup_integration: ComponentSetup,
get_events: GetEventsFn,
responses: list[AiohttpClientMockResponse],
hass_client: Callable[..., Awaitable[ClientSession]],
@@ -237,8 +243,6 @@ async def test_no_schedule(
"""Test calendar error when fetching the calendar."""
responses.extend([mock_response_error(HTTPStatus.BAD_GATEWAY)]) # Arbitrary error

assert await setup_integration()

state = hass.states.get(TEST_ENTITY)
assert state.state == "unavailable"
assert state.attributes == {
@@ -260,13 +264,10 @@ async def test_no_schedule(
)
async def test_program_schedule_disabled(
hass: HomeAssistant,
setup_integration: ComponentSetup,
get_events: GetEventsFn,
) -> None:
"""Test calendar when the program is disabled with no upcoming events."""

assert await setup_integration()

events = await get_events("2023-01-20T00:00:00Z", "2023-02-05T00:00:00Z")
assert events == []

@@ -279,20 +280,25 @@ async def test_program_schedule_disabled(


@pytest.mark.parametrize(
("config_entry_unique_id"),
("config_entry_data", "config_entry_unique_id", "setup_config_entry"),
[
(None),
(CONFIG_ENTRY_DATA_OLD_FORMAT, None, None),
],
)
async def test_no_unique_id(
hass: HomeAssistant,
setup_integration: ComponentSetup,
get_events: GetEventsFn,
responses: list[AiohttpClientMockResponse],
entity_registry: er.EntityRegistry,
config_entry: MockConfigEntry,
) -> None:
"""Test calendar entity with no unique id."""

assert await setup_integration()
# Failure to migrate config entry to a unique id
responses.insert(0, mock_response_error(HTTPStatus.SERVICE_UNAVAILABLE))

await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED

state = hass.states.get(TEST_ENTITY)
assert state is not None

@@ -19,22 +19,25 @@ from homeassistant.data_entry_flow import FlowResult, FlowResultType
from .conftest import (
CONFIG_ENTRY_DATA,
HOST,
MAC_ADDRESS_UNIQUE_ID,
PASSWORD,
SERIAL_NUMBER,
SERIAL_RESPONSE,
URL,
WIFI_PARAMS_RESPONSE,
ZERO_SERIAL_RESPONSE,
ComponentSetup,
mock_json_response,
mock_response,
)

from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker, AiohttpClientMockResponse


@pytest.fixture(name="responses")
def mock_responses() -> list[AiohttpClientMockResponse]:
"""Set up fake serial number response when testing the connection."""
return [mock_response(SERIAL_RESPONSE)]
return [mock_response(SERIAL_RESPONSE), mock_json_response(WIFI_PARAMS_RESPONSE)]


@pytest.fixture(autouse=True)
@@ -74,14 +77,20 @@ async def complete_flow(hass: HomeAssistant) -> FlowResult:
("responses", "expected_config_entry", "expected_unique_id"),
[
(
[mock_response(SERIAL_RESPONSE)],
[
mock_response(SERIAL_RESPONSE),
mock_json_response(WIFI_PARAMS_RESPONSE),
],
CONFIG_ENTRY_DATA,
SERIAL_NUMBER,
MAC_ADDRESS_UNIQUE_ID,
),
(
[mock_response(ZERO_SERIAL_RESPONSE)],
[
mock_response(ZERO_SERIAL_RESPONSE),
mock_json_response(WIFI_PARAMS_RESPONSE),
],
{**CONFIG_ENTRY_DATA, "serial_number": 0},
None,
MAC_ADDRESS_UNIQUE_ID,
),
],
)
@@ -115,31 +124,43 @@ async def test_controller_flow(
(
"other-serial-number",
{**CONFIG_ENTRY_DATA, "host": "other-host"},
[mock_response(SERIAL_RESPONSE)],
[mock_response(SERIAL_RESPONSE), mock_json_response(WIFI_PARAMS_RESPONSE)],
CONFIG_ENTRY_DATA,
),
(
"11:22:33:44:55:66",
{
**CONFIG_ENTRY_DATA,
"host": "other-host",
},
[
mock_response(SERIAL_RESPONSE),
mock_json_response(WIFI_PARAMS_RESPONSE),
],
CONFIG_ENTRY_DATA,
),
(
None,
{**CONFIG_ENTRY_DATA, "serial_number": 0, "host": "other-host"},
[mock_response(ZERO_SERIAL_RESPONSE)],
[
mock_response(ZERO_SERIAL_RESPONSE),
mock_json_response(WIFI_PARAMS_RESPONSE),
],
{**CONFIG_ENTRY_DATA, "serial_number": 0},
),
],
ids=["with-serial", "zero-serial"],
ids=["with-serial", "with-mac-address", "zero-serial"],
)
async def test_multiple_config_entries(
hass: HomeAssistant,
setup_integration: ComponentSetup,
config_entry: MockConfigEntry,
responses: list[AiohttpClientMockResponse],
config_flow_responses: list[AiohttpClientMockResponse],
expected_config_entry: dict[str, Any] | None,
) -> None:
"""Test setting up multiple config entries that refer to different devices."""
assert await setup_integration()

entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
assert entries[0].state == ConfigEntryState.LOADED
await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED

responses.clear()
responses.extend(config_flow_responses)
@@ -157,43 +178,73 @@ async def test_multiple_config_entries(
"config_entry_unique_id",
"config_entry_data",
"config_flow_responses",
"expected_config_entry_data",
),
[
# Config entry is a pure duplicate with the same mac address unique id
(
MAC_ADDRESS_UNIQUE_ID,
CONFIG_ENTRY_DATA,
[
mock_response(SERIAL_RESPONSE),
mock_json_response(WIFI_PARAMS_RESPONSE),
],
CONFIG_ENTRY_DATA,
),
# Old unique id with serial, but same host
(
SERIAL_NUMBER,
CONFIG_ENTRY_DATA,
[mock_response(SERIAL_RESPONSE)],
[mock_response(SERIAL_RESPONSE), mock_json_response(WIFI_PARAMS_RESPONSE)],
CONFIG_ENTRY_DATA,
),
# Old unique id with no serial, but same host
(
None,
{**CONFIG_ENTRY_DATA, "serial_number": 0},
[mock_response(ZERO_SERIAL_RESPONSE)],
[
mock_response(ZERO_SERIAL_RESPONSE),
mock_json_response(WIFI_PARAMS_RESPONSE),
],
{**CONFIG_ENTRY_DATA, "serial_number": 0},
),
# Enters a different hostname that points to the same mac address
(
MAC_ADDRESS_UNIQUE_ID,
{
**CONFIG_ENTRY_DATA,
"host": f"other-{HOST}",
},
[mock_response(SERIAL_RESPONSE), mock_json_response(WIFI_PARAMS_RESPONSE)],
CONFIG_ENTRY_DATA, # Updated the host
),
],
ids=[
"duplicate-serial-number",
"duplicate-mac-unique-id",
"duplicate-host-legacy-serial-number",
"duplicate-host-port-no-serial",
"duplicate-duplicate-hostname",
],
)
async def test_duplicate_config_entries(
hass: HomeAssistant,
setup_integration: ComponentSetup,
config_entry: MockConfigEntry,
responses: list[AiohttpClientMockResponse],
config_flow_responses: list[AiohttpClientMockResponse],
expected_config_entry_data: dict[str, Any],
) -> None:
"""Test that a device can not be registered twice."""
assert await setup_integration()

entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
assert entries[0].state == ConfigEntryState.LOADED
await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED

responses.clear()
responses.extend(config_flow_responses)

result = await complete_flow(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert result.get("type") == FlowResultType.ABORT
assert result.get("reason") == "already_configured"
assert dict(config_entry.data) == expected_config_entry_data


async def test_controller_cannot_connect(

@@ -6,81 +6,71 @@ from http import HTTPStatus

import pytest

from homeassistant.components.rainbird import DOMAIN
from homeassistant.components.rainbird.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import CONF_MAC
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .conftest import (
CONFIG_ENTRY_DATA,
CONFIG_ENTRY_DATA_OLD_FORMAT,
MAC_ADDRESS,
MAC_ADDRESS_UNIQUE_ID,
MODEL_AND_VERSION_RESPONSE,
ComponentSetup,
SERIAL_NUMBER,
WIFI_PARAMS_RESPONSE,
mock_json_response,
mock_response,
mock_response_error,
)

from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMockResponse


@pytest.mark.parametrize(
("yaml_config", "config_entry_data", "initial_response"),
[
({}, CONFIG_ENTRY_DATA, None),
],
ids=["config_entry"],
)
async def test_init_success(
hass: HomeAssistant,
setup_integration: ComponentSetup,
responses: list[AiohttpClientMockResponse],
initial_response: AiohttpClientMockResponse | None,
config_entry: MockConfigEntry,
) -> None:
"""Test successful setup and unload."""
if initial_response:
responses.insert(0, initial_response)

assert await setup_integration()
await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED

entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
assert entries[0].state == ConfigEntryState.LOADED

await hass.config_entries.async_unload(entries[0].entry_id)
await hass.config_entries.async_unload(config_entry.entry_id)
await hass.async_block_till_done()
assert entries[0].state is ConfigEntryState.NOT_LOADED
assert config_entry.state is ConfigEntryState.NOT_LOADED


@pytest.mark.parametrize(
("yaml_config", "config_entry_data", "responses", "config_entry_states"),
("config_entry_data", "responses", "config_entry_state"),
[
(
{},
CONFIG_ENTRY_DATA,
[mock_response_error(HTTPStatus.SERVICE_UNAVAILABLE)],
[ConfigEntryState.SETUP_RETRY],
ConfigEntryState.SETUP_RETRY,
),
(
{},
CONFIG_ENTRY_DATA,
[mock_response_error(HTTPStatus.INTERNAL_SERVER_ERROR)],
[ConfigEntryState.SETUP_RETRY],
ConfigEntryState.SETUP_RETRY,
),
(
{},
CONFIG_ENTRY_DATA,
[
mock_response(MODEL_AND_VERSION_RESPONSE),
mock_response_error(HTTPStatus.SERVICE_UNAVAILABLE),
],
[ConfigEntryState.SETUP_RETRY],
ConfigEntryState.SETUP_RETRY,
),
(
{},
CONFIG_ENTRY_DATA,
[
mock_response(MODEL_AND_VERSION_RESPONSE),
mock_response_error(HTTPStatus.INTERNAL_SERVER_ERROR),
],
[ConfigEntryState.SETUP_RETRY],
ConfigEntryState.SETUP_RETRY,
),
],
ids=[
@@ -92,13 +82,200 @@ async def test_init_success(
)
async def test_communication_failure(
hass: HomeAssistant,
setup_integration: ComponentSetup,
config_entry_states: list[ConfigEntryState],
config_entry: MockConfigEntry,
config_entry_state: list[ConfigEntryState],
) -> None:
"""Test unable to talk to device on startup, which fails setup."""
await config_entry.async_setup(hass)
assert config_entry.state == config_entry_state

assert await setup_integration()

assert [
entry.state for entry in hass.config_entries.async_entries(DOMAIN)
] == config_entry_states
@pytest.mark.parametrize(
("config_entry_unique_id", "config_entry_data"),
[
(
None,
{**CONFIG_ENTRY_DATA, "mac": None},
),
],
ids=["config_entry"],
)
async def test_fix_unique_id(
hass: HomeAssistant,
responses: list[AiohttpClientMockResponse],
config_entry: MockConfigEntry,
) -> None:
"""Test fix of a config entry with no unique id."""

responses.insert(0, mock_json_response(WIFI_PARAMS_RESPONSE))

entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
assert entries[0].state == ConfigEntryState.NOT_LOADED
assert entries[0].unique_id is None
assert entries[0].data.get(CONF_MAC) is None

await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED

# Verify config entry now has a unique id
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
assert entries[0].state == ConfigEntryState.LOADED
assert entries[0].unique_id == MAC_ADDRESS_UNIQUE_ID
assert entries[0].data.get(CONF_MAC) == MAC_ADDRESS


@pytest.mark.parametrize(
(
"config_entry_unique_id",
"config_entry_data",
"initial_response",
"expected_warning",
),
[
(
None,
CONFIG_ENTRY_DATA_OLD_FORMAT,
mock_response_error(HTTPStatus.SERVICE_UNAVAILABLE),
"Unable to fix missing unique id:",
),
(
None,
CONFIG_ENTRY_DATA_OLD_FORMAT,
mock_response_error(HTTPStatus.NOT_FOUND),
"Unable to fix missing unique id:",
),
(
None,
CONFIG_ENTRY_DATA_OLD_FORMAT,
mock_response("bogus"),
"Unable to fix missing unique id (mac address was None)",
),
],
ids=["service_unavailable", "not_found", "unexpected_response_format"],
)
async def test_fix_unique_id_failure(
hass: HomeAssistant,
initial_response: AiohttpClientMockResponse,
responses: list[AiohttpClientMockResponse],
expected_warning: str,
caplog: pytest.LogCaptureFixture,
config_entry: MockConfigEntry,
) -> None:
"""Test a failure during fix of a config entry with no unique id."""

responses.insert(0, initial_response)

await config_entry.async_setup(hass)
# Config entry is loaded, but not updated
assert config_entry.state == ConfigEntryState.LOADED
assert config_entry.unique_id is None

assert expected_warning in caplog.text


@pytest.mark.parametrize(
("config_entry_unique_id"),
[(MAC_ADDRESS_UNIQUE_ID)],
)
async def test_fix_unique_id_duplicate(
hass: HomeAssistant,
config_entry: MockConfigEntry,
responses: list[AiohttpClientMockResponse],
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test that a config entry unique id already exists during fix."""
# Add a second config entry that has no unique id, but has the same
# mac address. When fixing the unique id, it can't use the mac address
# since it already exists.
other_entry = MockConfigEntry(
unique_id=None,
domain=DOMAIN,
data=CONFIG_ENTRY_DATA_OLD_FORMAT,
)
other_entry.add_to_hass(hass)

# Responses for the second config entry. This first fetches wifi params
# to repair the unique id.
responses_copy = [*responses]
responses.append(mock_json_response(WIFI_PARAMS_RESPONSE))
responses.extend(responses_copy)

await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED
assert config_entry.unique_id == MAC_ADDRESS_UNIQUE_ID

await other_entry.async_setup(hass)
# Config entry unique id could not be updated since it already exists
assert other_entry.state == ConfigEntryState.SETUP_ERROR

assert "Unable to fix missing unique id (already exists)" in caplog.text

await hass.async_block_till_done()
assert len(hass.config_entries.async_entries(DOMAIN)) == 1


@pytest.mark.parametrize(
(
"config_entry_unique_id",
"serial_number",
"entity_unique_id",
"expected_unique_id",
),
[
(SERIAL_NUMBER, SERIAL_NUMBER, SERIAL_NUMBER, MAC_ADDRESS_UNIQUE_ID),
(
SERIAL_NUMBER,
SERIAL_NUMBER,
f"{SERIAL_NUMBER}-rain-delay",
f"{MAC_ADDRESS_UNIQUE_ID}-rain-delay",
),
("0", 0, "0", MAC_ADDRESS_UNIQUE_ID),
(
"0",
0,
"0-rain-delay",
f"{MAC_ADDRESS_UNIQUE_ID}-rain-delay",
),
(
MAC_ADDRESS_UNIQUE_ID,
SERIAL_NUMBER,
MAC_ADDRESS_UNIQUE_ID,
MAC_ADDRESS_UNIQUE_ID,
),
(
MAC_ADDRESS_UNIQUE_ID,
SERIAL_NUMBER,
f"{MAC_ADDRESS_UNIQUE_ID}-rain-delay",
f"{MAC_ADDRESS_UNIQUE_ID}-rain-delay",
),
],
ids=(
"serial-number",
"serial-number-with-suffix",
"zero-serial",
"zero-serial-suffix",
"new-format",
"new-format-suffx",
),
)
async def test_fix_entity_unique_ids(
hass: HomeAssistant,
config_entry: MockConfigEntry,
entity_unique_id: str,
expected_unique_id: str,
) -> None:
"""Test fixing entity unique ids from old unique id formats."""

entity_registry = er.async_get(hass)
entity_entry = entity_registry.async_get_or_create(
DOMAIN, "number", unique_id=entity_unique_id, config_entry=config_entry
)

await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED

entity_entry = entity_registry.async_get(entity_entry.id)
assert entity_entry
assert entity_entry.unique_id == expected_unique_id

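(Aside: the unique-id migration these tests cover replaces the serial-number id with one based on the device MAC; the lowercase form used throughout matches Home Assistant's standard MAC normalization, assuming the integration relies on the usual device-registry helper:)

from homeassistant.helpers.device_registry import format_mac

assert format_mac("4C:A1:61:00:11:22") == "4c:a1:61:00:11:22"  # MAC_ADDRESS -> MAC_ADDRESS_UNIQUE_ID
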
@@ -6,7 +6,7 @@ import pytest

from homeassistant.components import number
from homeassistant.components.rainbird import DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_ENTITY_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
@@ -14,15 +14,16 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er

from .conftest import (
ACK_ECHO,
CONFIG_ENTRY_DATA_OLD_FORMAT,
MAC_ADDRESS,
RAIN_DELAY,
RAIN_DELAY_OFF,
SERIAL_NUMBER,
ComponentSetup,
mock_response,
mock_response_error,
)

from tests.test_util.aiohttp import AiohttpClientMocker
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker, AiohttpClientMockResponse


@pytest.fixture
@@ -31,20 +32,26 @@ def platforms() -> list[str]:
return [Platform.NUMBER]


@pytest.fixture(autouse=True)
async def setup_config_entry(
hass: HomeAssistant, config_entry: MockConfigEntry
) -> list[Platform]:
"""Fixture to setup the config entry."""
await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED


@pytest.mark.parametrize(
("rain_delay_response", "expected_state"),
[(RAIN_DELAY, "16"), (RAIN_DELAY_OFF, "0")],
)
async def test_number_values(
hass: HomeAssistant,
setup_integration: ComponentSetup,
expected_state: str,
entity_registry: er.EntityRegistry,
) -> None:
"""Test number platform."""

assert await setup_integration()

raindelay = hass.states.get("number.rain_bird_controller_rain_delay")
assert raindelay is not None
assert raindelay.state == expected_state
@@ -60,52 +67,23 @@ async def test_number_values(

entity_entry = entity_registry.async_get("number.rain_bird_controller_rain_delay")
assert entity_entry
assert entity_entry.unique_id == "1263613994342-rain-delay"


@pytest.mark.parametrize(
("config_entry_unique_id", "entity_unique_id"),
[
(SERIAL_NUMBER, "1263613994342-rain-delay"),
# Some existing config entries may have a "0" serial number but preserve
# their unique id
(0, "0-rain-delay"),
],
)
async def test_unique_id(
hass: HomeAssistant,
setup_integration: ComponentSetup,
entity_registry: er.EntityRegistry,
entity_unique_id: str,
) -> None:
"""Test number platform."""

assert await setup_integration()

raindelay = hass.states.get("number.rain_bird_controller_rain_delay")
assert raindelay is not None
assert (
raindelay.attributes.get("friendly_name") == "Rain Bird Controller Rain delay"
)

entity_entry = entity_registry.async_get("number.rain_bird_controller_rain_delay")
assert entity_entry
assert entity_entry.unique_id == entity_unique_id
assert entity_entry.unique_id == "4c:a1:61:00:11:22-rain-delay"


async def test_set_value(
hass: HomeAssistant,
setup_integration: ComponentSetup,
aioclient_mock: AiohttpClientMocker,
responses: list[str],
config_entry: ConfigEntry,
) -> None:
"""Test setting the rain delay number."""

assert await setup_integration()
raindelay = hass.states.get("number.rain_bird_controller_rain_delay")
assert raindelay is not None

device_registry = dr.async_get(hass)
device = device_registry.async_get_device(identifiers={(DOMAIN, SERIAL_NUMBER)})
device = device_registry.async_get_device(
identifiers={(DOMAIN, MAC_ADDRESS.lower())}
)
assert device
assert device.name == "Rain Bird Controller"
assert device.model == "ESP-TM2"
@@ -136,17 +114,13 @@ async def test_set_value(
)
async def test_set_value_error(
hass: HomeAssistant,
setup_integration: ComponentSetup,
aioclient_mock: AiohttpClientMocker,
responses: list[str],
config_entry: ConfigEntry,
status: HTTPStatus,
expected_msg: str,
) -> None:
"""Test an error while talking to the device."""

assert await setup_integration()

aioclient_mock.mock_calls.clear()
responses.append(mock_response_error(status=status))

@@ -165,19 +139,24 @@ async def test_set_value_error(


@pytest.mark.parametrize(
("config_entry_unique_id"),
("config_entry_data", "config_entry_unique_id", "setup_config_entry"),
[
(None),
(CONFIG_ENTRY_DATA_OLD_FORMAT, None, None),
],
)
async def test_no_unique_id(
hass: HomeAssistant,
setup_integration: ComponentSetup,
responses: list[AiohttpClientMockResponse],
entity_registry: er.EntityRegistry,
config_entry: MockConfigEntry,
) -> None:
"""Test number platform with no unique id."""

assert await setup_integration()
# Failure to migrate config entry to a unique id
responses.insert(0, mock_response_error(HTTPStatus.SERVICE_UNAVAILABLE))

await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED

raindelay = hass.states.get("number.rain_bird_controller_rain_delay")
assert raindelay is not None

@@ -1,13 +1,23 @@
"""Tests for rainbird sensor platform."""

from http import HTTPStatus

import pytest

from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .conftest import CONFIG_ENTRY_DATA, RAIN_DELAY, RAIN_DELAY_OFF, ComponentSetup
from .conftest import (
CONFIG_ENTRY_DATA_OLD_FORMAT,
RAIN_DELAY,
RAIN_DELAY_OFF,
mock_response_error,
)

from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMockResponse


@pytest.fixture
@@ -16,20 +26,26 @@ def platforms() -> list[str]:
return [Platform.SENSOR]


@pytest.fixture(autouse=True)
async def setup_config_entry(
hass: HomeAssistant, config_entry: MockConfigEntry
) -> list[Platform]:
"""Fixture to setup the config entry."""
await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED


@pytest.mark.parametrize(
("rain_delay_response", "expected_state"),
[(RAIN_DELAY, "16"), (RAIN_DELAY_OFF, "0")],
)
async def test_sensors(
hass: HomeAssistant,
setup_integration: ComponentSetup,
entity_registry: er.EntityRegistry,
expected_state: str,
) -> None:
"""Test sensor platform."""

assert await setup_integration()

raindelay = hass.states.get("sensor.rain_bird_controller_raindelay")
assert raindelay is not None
assert raindelay.state == expected_state
@@ -40,39 +56,37 @@ async def test_sensors(

entity_entry = entity_registry.async_get("sensor.rain_bird_controller_raindelay")
assert entity_entry
assert entity_entry.unique_id == "1263613994342-raindelay"
assert entity_entry.unique_id == "4c:a1:61:00:11:22-raindelay"


@pytest.mark.parametrize(
("config_entry_unique_id", "config_entry_data"),
("config_entry_unique_id", "config_entry_data", "setup_config_entry"),
[
# Config entry setup without a unique id since it had no serial number
(
None,
{
**CONFIG_ENTRY_DATA,
"serial_number": 0,
},
),
# Legacy case for old config entries with serial number 0 preserves old behavior
(
"0",
{
**CONFIG_ENTRY_DATA,
**CONFIG_ENTRY_DATA_OLD_FORMAT,
"serial_number": 0,
},
None,
),
],
)
async def test_sensor_no_unique_id(
hass: HomeAssistant,
setup_integration: ComponentSetup,
entity_registry: er.EntityRegistry,
responses: list[AiohttpClientMockResponse],
config_entry_unique_id: str | None,
config_entry: MockConfigEntry,
) -> None:
"""Test sensor platform with no unique id."""

assert await setup_integration()
# Failure to migrate config entry to a unique id
responses.insert(0, mock_response_error(HTTPStatus.SERVICE_UNAVAILABLE))

await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED

raindelay = hass.states.get("sensor.rain_bird_controller_raindelay")
assert raindelay is not None

@@ -5,6 +5,7 @@ from http import HTTPStatus
import pytest

from homeassistant.components.rainbird import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_ENTITY_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
@@ -12,20 +13,21 @@ from homeassistant.helpers import entity_registry as er

from .conftest import (
ACK_ECHO,
CONFIG_ENTRY_DATA_OLD_FORMAT,
EMPTY_STATIONS_RESPONSE,
HOST,
MAC_ADDRESS,
PASSWORD,
RAIN_DELAY_OFF,
RAIN_SENSOR_OFF,
SERIAL_NUMBER,
ZONE_3_ON_RESPONSE,
ZONE_5_ON_RESPONSE,
ZONE_OFF_RESPONSE,
ComponentSetup,
mock_response,
mock_response_error,
)

from tests.common import MockConfigEntry
from tests.components.switch import common as switch_common
from tests.test_util.aiohttp import AiohttpClientMocker, AiohttpClientMockResponse

@@ -36,18 +38,24 @@ def platforms() -> list[str]:
return [Platform.SWITCH]


@pytest.fixture(autouse=True)
async def setup_config_entry(
hass: HomeAssistant, config_entry: MockConfigEntry
) -> list[Platform]:
"""Fixture to setup the config entry."""
await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED


@pytest.mark.parametrize(
"stations_response",
[EMPTY_STATIONS_RESPONSE],
)
async def test_no_zones(
hass: HomeAssistant,
setup_integration: ComponentSetup,
) -> None:
"""Test case where listing stations returns no stations."""

assert await setup_integration()

zone = hass.states.get("switch.rain_bird_sprinkler_1")
assert zone is None

@@ -58,13 +66,10 @@ async def test_no_zones(
)
async def test_zones(
hass: HomeAssistant,
setup_integration: ComponentSetup,
entity_registry: er.EntityRegistry,
) -> None:
"""Test switch platform with fake data that creates 7 zones with one enabled."""

assert await setup_integration()

zone = hass.states.get("switch.rain_bird_sprinkler_1")
assert zone is not None
assert zone.state == "off"
@@ -105,19 +110,16 @@ async def test_zones(

# Verify unique id for one of the switches
entity_entry = entity_registry.async_get("switch.rain_bird_sprinkler_3")
assert entity_entry.unique_id == "1263613994342-3"
assert entity_entry.unique_id == "4c:a1:61:00:11:22-3"


async def test_switch_on(
hass: HomeAssistant,
setup_integration: ComponentSetup,
aioclient_mock: AiohttpClientMocker,
responses: list[AiohttpClientMockResponse],
) -> None:
"""Test turning on irrigation switch."""

assert await setup_integration()

# Initially all zones are off. Pick zone3 as an arbitrary to assert
# state, then update below as a switch.
zone = hass.states.get("switch.rain_bird_sprinkler_3")
@@ -149,14 +151,11 @@ async def test_switch_on(
)
async def test_switch_off(
hass: HomeAssistant,
setup_integration: ComponentSetup,
aioclient_mock: AiohttpClientMocker,
responses: list[AiohttpClientMockResponse],
) -> None:
"""Test turning off irrigation switch."""

assert await setup_integration()

# Initially the test zone is on
zone = hass.states.get("switch.rain_bird_sprinkler_3")
assert zone is not None
@@ -182,15 +181,12 @@ async def test_switch_off(

async def test_irrigation_service(
hass: HomeAssistant,
setup_integration: ComponentSetup,
aioclient_mock: AiohttpClientMocker,
responses: list[AiohttpClientMockResponse],
api_responses: list[str],
) -> None:
"""Test calling the irrigation service."""

assert await setup_integration()

zone = hass.states.get("switch.rain_bird_sprinkler_3")
assert zone is not None
assert zone.state == "off"
@@ -219,10 +215,9 @@ async def test_irrigation_service(


@pytest.mark.parametrize(
("yaml_config", "config_entry_data"),
("config_entry_data"),
[
(
{},
{
"host": HOST,
"password": PASSWORD,
@@ -232,17 +227,16 @@ async def test_irrigation_service(
"1": "Garden Sprinkler",
"2": "Back Yard",
},
},
"mac": MAC_ADDRESS,
}
)
],
)
async def test_yaml_imported_config(
hass: HomeAssistant,
setup_integration: ComponentSetup,
responses: list[AiohttpClientMockResponse],
) -> None:
"""Test a config entry that was previously imported from yaml."""
assert await setup_integration()

assert hass.states.get("switch.garden_sprinkler")
assert not hass.states.get("switch.rain_bird_sprinkler_1")
@@ -260,7 +254,6 @@ async def test_yaml_imported_config(
)
async def test_switch_error(
hass: HomeAssistant,
setup_integration: ComponentSetup,
aioclient_mock: AiohttpClientMocker,
responses: list[AiohttpClientMockResponse],
status: HTTPStatus,
@@ -268,8 +261,6 @@ async def test_switch_error(
) -> None:
"""Test an error talking to the device."""

assert await setup_integration()

aioclient_mock.mock_calls.clear()
responses.append(mock_response_error(status=status))

@@ -285,21 +276,25 @@ async def test_switch_error(


@pytest.mark.parametrize(
("config_entry_unique_id"),
("config_entry_data", "config_entry_unique_id", "setup_config_entry"),
[
(None),
(CONFIG_ENTRY_DATA_OLD_FORMAT, None, None),
],
)
async def test_no_unique_id(
hass: HomeAssistant,
setup_integration: ComponentSetup,
aioclient_mock: AiohttpClientMocker,
responses: list[AiohttpClientMockResponse],
entity_registry: er.EntityRegistry,
config_entry: MockConfigEntry,
) -> None:
"""Test an irrigation switch with no unique id."""
"""Test an irrigation switch with no unique id due to migration failure."""

assert await setup_integration()
# Failure to migrate config entry to a unique id
responses.insert(0, mock_response_error(HTTPStatus.SERVICE_UNAVAILABLE))

await config_entry.async_setup(hass)
assert config_entry.state == ConfigEntryState.LOADED

zone = hass.states.get("switch.rain_bird_sprinkler_3")
assert zone is not None
@@ -308,34 +303,3 @@ async def test_no_unique_id(

entity_entry = entity_registry.async_get("switch.rain_bird_sprinkler_3")
assert entity_entry is None


@pytest.mark.parametrize(
("config_entry_unique_id", "entity_unique_id"),
[
(SERIAL_NUMBER, "1263613994342-3"),
# Some existing config entries may have a "0" serial number but preserve
# their unique id
(0, "0-3"),
],
)
async def test_has_unique_id(
hass: HomeAssistant,
setup_integration: ComponentSetup,
aioclient_mock: AiohttpClientMocker,
responses: list[AiohttpClientMockResponse],
entity_registry: er.EntityRegistry,
entity_unique_id: str,
) -> None:
"""Test an irrigation switch with no unique id."""

assert await setup_integration()

zone = hass.states.get("switch.rain_bird_sprinkler_3")
assert zone is not None
assert zone.attributes.get("friendly_name") == "Rain Bird Sprinkler 3"
assert zone.state == "off"

entity_entry = entity_registry.async_get("switch.rain_bird_sprinkler_3")
assert entity_entry
assert entity_entry.unique_id == entity_unique_id

@@ -157,7 +157,7 @@ def test_config_verify_ssl_but_no_ssl_enabled(
assert "_http_id=1234567890" in result.req.body
assert "exec=devlist" in result.req.body
assert mock_session_send.call_count == 1
assert mock_session_send.mock_calls[0] == mock.call(result.req, timeout=3)
assert mock_session_send.mock_calls[0] == mock.call(result.req, timeout=60)


@mock.patch("os.access", return_value=True)
@@ -192,7 +192,7 @@ def test_config_valid_verify_ssl_path(hass: HomeAssistant, mock_session_send) ->
assert "exec=devlist" in result.req.body
assert mock_session_send.call_count == 1
assert mock_session_send.mock_calls[0] == mock.call(
result.req, timeout=3, verify="/test/tomato.crt"
result.req, timeout=60, verify="/test/tomato.crt"
)


@@ -223,7 +223,7 @@ def test_config_valid_verify_ssl_bool(hass: HomeAssistant, mock_session_send) ->
assert "exec=devlist" in result.req.body
assert mock_session_send.call_count == 1
assert mock_session_send.mock_calls[0] == mock.call(
result.req, timeout=3, verify=False
result.req, timeout=60, verify=False
)