Mirror of https://github.com/home-assistant/core.git (synced 2025-11-08 02:19:31 +00:00)

Compare commits: dev...karwosts-p (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | 1396b5c502 |  |
@@ -179,18 +179,12 @@ class Data:
user_hash = base64.b64decode(found["password"])

# bcrypt.checkpw is timing-safe
# With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
# Previously the password was silently truncated.
# https://github.com/pyca/bcrypt/pull/1000
if not bcrypt.checkpw(password.encode()[:72], user_hash):
if not bcrypt.checkpw(password.encode(), user_hash):
raise InvalidAuth

def hash_password(self, password: str, for_storage: bool = False) -> bytes:
"""Encode a password."""
# With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
# Previously the password was silently truncated.
# https://github.com/pyca/bcrypt/pull/1000
hashed: bytes = bcrypt.hashpw(password.encode()[:72], bcrypt.gensalt(rounds=12))
hashed: bytes = bcrypt.hashpw(password.encode(), bcrypt.gensalt(rounds=12))

if for_storage:
hashed = base64.b64encode(hashed)
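The comments in the hunk above describe the bcrypt 5.0 behaviour change being worked around: passwords longer than 72 bytes now raise a ValueError instead of being silently truncated. A minimal, self-contained sketch of the same guard, assuming only that the `bcrypt` package is installed (function names here are illustrative, not taken from the diff):

```python
import bcrypt

def hash_password(password: str) -> bytes:
    # bcrypt only ever uses the first 72 bytes; bcrypt 5.0 raises ValueError
    # for longer input instead of silently truncating, so truncate explicitly.
    return bcrypt.hashpw(password.encode()[:72], bcrypt.gensalt(rounds=12))

def check_password(password: str, stored_hash: bytes) -> bool:
    # Apply the same truncation when verifying so long passwords keep matching
    # hashes created before the upgrade.
    return bcrypt.checkpw(password.encode()[:72], stored_hash)
```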
@@ -23,7 +23,7 @@ from homeassistant.components.bluetooth import (
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ADDRESS

from .const import DEVICE_MODEL, DOMAIN, MFCT_ID
from .const import DOMAIN, MFCT_ID

_LOGGER = logging.getLogger(__name__)

@@ -128,15 +128,15 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm discovery."""
assert self._discovered_device is not None

if user_input is not None:
if self._discovered_device.device.firmware.need_firmware_upgrade:
if (
self._discovered_device is not None
and self._discovered_device.device.firmware.need_firmware_upgrade
):
return self.async_abort(reason="firmware_upgrade_required")

return self.async_create_entry(
title=self.context["title_placeholders"]["name"],
data={DEVICE_MODEL: self._discovered_device.device.model.value},
title=self.context["title_placeholders"]["name"], data={}
)

self._set_confirm_only()
@@ -164,10 +164,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):

self._discovered_device = discovery

return self.async_create_entry(
title=discovery.name,
data={DEVICE_MODEL: discovery.device.model.value},
)
return self.async_create_entry(title=discovery.name, data={})

current_addresses = self._async_current_ids(include_ignore=False)
devices: list[BluetoothServiceInfoBleak] = []

@@ -1,16 +1,11 @@
"""Constants for Airthings BLE."""

from airthings_ble import AirthingsDeviceType

DOMAIN = "airthings_ble"
MFCT_ID = 820

VOLUME_BECQUEREL = "Bq/m³"
VOLUME_PICOCURIE = "pCi/L"

DEVICE_MODEL = "device_model"

DEFAULT_SCAN_INTERVAL = 300
DEVICE_SPECIFIC_SCAN_INTERVAL = {AirthingsDeviceType.CORENTIUM_HOME_2.value: 1800}

MAX_RETRIES_AFTER_STARTUP = 5
@@ -16,12 +16,7 @@ from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util.unit_system import METRIC_SYSTEM

from .const import (
DEFAULT_SCAN_INTERVAL,
DEVICE_MODEL,
DEVICE_SPECIFIC_SCAN_INTERVAL,
DOMAIN,
)
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN

_LOGGER = logging.getLogger(__name__)

@@ -39,18 +34,12 @@ class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
self.airthings = AirthingsBluetoothDeviceData(
_LOGGER, hass.config.units is METRIC_SYSTEM
)

device_model = entry.data.get(DEVICE_MODEL)
interval = DEVICE_SPECIFIC_SCAN_INTERVAL.get(
device_model, DEFAULT_SCAN_INTERVAL
)

super().__init__(
hass,
_LOGGER,
config_entry=entry,
name=DOMAIN,
update_interval=timedelta(seconds=interval),
update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
)

async def _async_setup(self) -> None:
@@ -69,29 +58,11 @@ class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
)
self.ble_device = ble_device

if DEVICE_MODEL not in self.config_entry.data:
_LOGGER.debug("Fetching device info for migration")
try:
data = await self.airthings.update_device(self.ble_device)
except Exception as err:
raise UpdateFailed(
f"Unable to fetch data for migration: {err}"
) from err

self.hass.config_entries.async_update_entry(
self.config_entry,
data={**self.config_entry.data, DEVICE_MODEL: data.model.value},
)
self.update_interval = timedelta(
seconds=DEVICE_SPECIFIC_SCAN_INTERVAL.get(
data.model.value, DEFAULT_SCAN_INTERVAL
)
)

async def _async_update_data(self) -> AirthingsDevice:
"""Get data from Airthings BLE."""
try:
data = await self.airthings.update_device(self.ble_device)
except Exception as err:
raise UpdateFailed(f"Unable to fetch data: {err}") from err

return data
@@ -6,8 +6,8 @@ from collections.abc import Callable
from dataclasses import dataclass
from typing import Final

from aioamazondevices.const.metadata import SENSOR_STATE_OFF
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SENSOR_STATE_OFF

from homeassistant.components.binary_sensor import (
DOMAIN as BINARY_SENSOR_DOMAIN,

@@ -2,13 +2,12 @@

from datetime import timedelta

from aioamazondevices.api import AmazonEchoApi
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
from aioamazondevices.exceptions import (
CannotAuthenticate,
CannotConnect,
CannotRetrieveData,
)
from aioamazondevices.structures import AmazonDevice
from aiohttp import ClientSession

from homeassistant.config_entries import ConfigEntry

@@ -2,10 +2,9 @@

from __future__ import annotations

from dataclasses import asdict
from typing import Any

from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME
@@ -61,5 +60,5 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
"online": device.online,
"serial number": device.serial_number,
"software version": device.software_version,
"sensors": {key: asdict(sensor) for key, sensor in device.sensors.items()},
"sensors": device.sensors,
}
@@ -1,7 +1,7 @@
"""Defines a base Alexa Devices entity."""

from aioamazondevices.const.devices import SPEAKER_GROUP_MODEL
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SPEAKER_GROUP_MODEL

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==8.0.1"]
"requirements": ["aioamazondevices==6.5.6"]
}

@@ -6,9 +6,8 @@ from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any, Final

from aioamazondevices.api import AmazonEchoApi
from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
from aioamazondevices.const import SPEAKER_GROUP_FAMILY

from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
from homeassistant.core import HomeAssistant

@@ -7,12 +7,12 @@ from dataclasses import dataclass
from datetime import datetime
from typing import Final

from aioamazondevices.const.schedules import (
from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import (
NOTIFICATION_ALARM,
NOTIFICATION_REMINDER,
NOTIFICATION_TIMER,
)
from aioamazondevices.structures import AmazonDevice

from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -1,6 +1,6 @@
"""Support for services."""

from aioamazondevices.const.sounds import SOUNDS_LIST
from aioamazondevices.sounds import SOUNDS_LIST
import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState

@@ -6,7 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Final

from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice

from homeassistant.components.switch import (
DOMAIN as SWITCH_DOMAIN,

@@ -4,7 +4,7 @@ from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate

from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
from aioamazondevices.const import SPEAKER_GROUP_FAMILY
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
@@ -9,14 +9,14 @@ from homeassistant.helpers import config_validation as cv

from .const import CONF_SITE_ID, DOMAIN, PLATFORMS
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
from .services import async_setup_services
from .services import setup_services

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Amber component."""
async_setup_services(hass)
setup_services(hass)
return True

@@ -10,7 +10,6 @@ from homeassistant.core import (
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.selector import ConfigEntrySelector
@@ -103,8 +102,7 @@ def get_forecasts(channel_type: str, data: dict) -> list[JsonValueType]:
return results

@callback
def async_setup_services(hass: HomeAssistant) -> None:
def setup_services(hass: HomeAssistant) -> None:
"""Set up the services for the Amber integration."""

async def handle_get_forecasts(call: ServiceCall) -> ServiceResponse:

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/bang_olufsen",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["mozart-api==5.1.0.247.1"],
"requirements": ["mozart-api==4.1.1.116.4"],
"zeroconf": ["_bangolufsen._tcp.local."]
}

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/blue_current",
"iot_class": "cloud_push",
"loggers": ["bluecurrent_api"],
"requirements": ["bluecurrent-api==1.3.2"]
"requirements": ["bluecurrent-api==1.3.1"]
}
@@ -99,12 +99,6 @@ def deserialize_entity_description(
descriptions_class = descriptions_class._dataclass  # noqa: SLF001
for field in cached_fields(descriptions_class):
field_name = field.name
# Only set fields that are in the data
# otherwise we would override default values with None
# causing side effects
if field_name not in data:
continue

# It would be nice if field.type returned the actual
# type instead of a str so we could avoid writing this
# out, but it doesn't. If we end up using this in more

@@ -63,7 +63,6 @@ BINARY_SENSOR_DESCRIPTIONS = {
),
BTHomeBinarySensorDeviceClass.GENERIC: BinarySensorEntityDescription(
key=BTHomeBinarySensorDeviceClass.GENERIC,
translation_key="generic",
),
BTHomeBinarySensorDeviceClass.LIGHT: BinarySensorEntityDescription(
key=BTHomeBinarySensorDeviceClass.LIGHT,
@@ -160,7 +159,10 @@ def sensor_update_to_bluetooth_data_update(
device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
for device_key, sensor_values in sensor_update.binary_entity_values.items()
},
entity_names={},
entity_names={
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
for device_key, sensor_values in sensor_update.binary_entity_values.items()
},
)
@@ -59,7 +59,6 @@ SENSOR_DESCRIPTIONS = {
key=f"{BTHomeSensorDeviceClass.ACCELERATION}_{Units.ACCELERATION_METERS_PER_SQUARE_SECOND}",
native_unit_of_measurement=Units.ACCELERATION_METERS_PER_SQUARE_SECOND,
state_class=SensorStateClass.MEASUREMENT,
translation_key="acceleration",
),
# Battery (percent)
(BTHomeSensorDeviceClass.BATTERY, Units.PERCENTAGE): SensorEntityDescription(
@@ -73,7 +72,6 @@ SENSOR_DESCRIPTIONS = {
(BTHomeExtendedSensorDeviceClass.CHANNEL, None): SensorEntityDescription(
key=str(BTHomeExtendedSensorDeviceClass.CHANNEL),
state_class=SensorStateClass.MEASUREMENT,
translation_key="channel",
),
# Conductivity (μS/cm)
(
@@ -89,7 +87,6 @@ SENSOR_DESCRIPTIONS = {
(BTHomeSensorDeviceClass.COUNT, None): SensorEntityDescription(
key=str(BTHomeSensorDeviceClass.COUNT),
state_class=SensorStateClass.MEASUREMENT,
translation_key="count",
),
# CO2 (parts per million)
(
@@ -117,14 +114,12 @@ SENSOR_DESCRIPTIONS = {
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
translation_key="dew_point",
),
# Directions (°)
(BTHomeExtendedSensorDeviceClass.DIRECTION, Units.DEGREE): SensorEntityDescription(
key=f"{BTHomeExtendedSensorDeviceClass.DIRECTION}_{Units.DEGREE}",
native_unit_of_measurement=DEGREE,
state_class=SensorStateClass.MEASUREMENT,
translation_key="direction",
),
# Distance (mm)
(
@@ -178,7 +173,6 @@ SENSOR_DESCRIPTIONS = {
key=f"{BTHomeSensorDeviceClass.GYROSCOPE}_{Units.GYROSCOPE_DEGREES_PER_SECOND}",
native_unit_of_measurement=Units.GYROSCOPE_DEGREES_PER_SECOND,
state_class=SensorStateClass.MEASUREMENT,
translation_key="gyroscope",
),
# Humidity in (percent)
(BTHomeSensorDeviceClass.HUMIDITY, Units.PERCENTAGE): SensorEntityDescription(
@@ -221,7 +215,6 @@ SENSOR_DESCRIPTIONS = {
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
translation_key="packet_id",
),
# PM10 (μg/m3)
(
@@ -270,14 +263,12 @@ SENSOR_DESCRIPTIONS = {
# Raw (-)
(BTHomeExtendedSensorDeviceClass.RAW, None): SensorEntityDescription(
key=str(BTHomeExtendedSensorDeviceClass.RAW),
translation_key="raw",
),
# Rotation (°)
(BTHomeSensorDeviceClass.ROTATION, Units.DEGREE): SensorEntityDescription(
key=f"{BTHomeSensorDeviceClass.ROTATION}_{Units.DEGREE}",
native_unit_of_measurement=DEGREE,
state_class=SensorStateClass.MEASUREMENT,
translation_key="rotation",
),
# Rotational speed (rpm)
(
@@ -287,7 +278,6 @@ SENSOR_DESCRIPTIONS = {
key=f"{BTHomeExtendedSensorDeviceClass.ROTATIONAL_SPEED}_{Units.REVOLUTIONS_PER_MINUTE}",
native_unit_of_measurement=REVOLUTIONS_PER_MINUTE,
state_class=SensorStateClass.MEASUREMENT,
translation_key="rotational_speed",
),
# Signal Strength (RSSI) (dB)
(
@@ -321,7 +311,6 @@ SENSOR_DESCRIPTIONS = {
# Text (-)
(BTHomeExtendedSensorDeviceClass.TEXT, None): SensorEntityDescription(
key=str(BTHomeExtendedSensorDeviceClass.TEXT),
translation_key="text",
),
# Timestamp (datetime object)
(
@@ -338,7 +327,6 @@ SENSOR_DESCRIPTIONS = {
): SensorEntityDescription(
key=str(BTHomeSensorDeviceClass.UV_INDEX),
state_class=SensorStateClass.MEASUREMENT,
translation_key="uv_index",
),
# Volatile organic Compounds (VOC) (μg/m3)
(
@@ -435,7 +423,10 @@ def sensor_update_to_bluetooth_data_update(
)
for device_key, sensor_values in sensor_update.entity_values.items()
},
entity_names={},
entity_names={
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
for device_key, sensor_values in sensor_update.entity_values.items()
},
)
@@ -47,11 +47,6 @@
}
},
"entity": {
"binary_sensor": {
"generic": {
"name": "Generic"
}
},
"event": {
"button": {
"state_attributes": {
@@ -78,44 +73,6 @@
}
}
}
},
"sensor": {
"acceleration": {
"name": "Acceleration"
},
"channel": {
"name": "Channel"
},
"count": {
"name": "Count"
},
"dew_point": {
"name": "Dew point"
},
"direction": {
"name": "Direction"
},
"gyroscope": {
"name": "Gyroscope"
},
"packet_id": {
"name": "Packet ID"
},
"raw": {
"name": "Raw"
},
"rotation": {
"name": "Rotation"
},
"rotational_speed": {
"name": "Rotational speed"
},
"text": {
"name": "Text"
},
"uv_index": {
"name": "UV Index"
}
}
}
}
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.4.0", "home-assistant-intents==2025.11.7"]
"requirements": ["hassil==3.4.0", "home-assistant-intents==2025.10.28"]
}

@@ -5,7 +5,6 @@ from __future__ import annotations
import asyncio
from collections.abc import Mapping
from functools import partial
import logging
from typing import Any

from devolo_home_control_api.exceptions.gateway import GatewayOfflineError
@@ -23,8 +22,6 @@ from .const import DOMAIN, PLATFORMS

type DevoloHomeControlConfigEntry = ConfigEntry[list[HomeControl]]

_LOGGER = logging.getLogger(__name__)

async def async_setup_entry(
hass: HomeAssistant, entry: DevoloHomeControlConfigEntry
@@ -47,29 +44,26 @@ async def async_setup_entry(
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)
)

zeroconf_instance = await zeroconf.async_get_instance(hass)
entry.runtime_data = []
offline_gateways = 0
for gateway_id in gateway_ids:
try:
try:
zeroconf_instance = await zeroconf.async_get_instance(hass)
entry.runtime_data = []
for gateway_id in gateway_ids:
entry.runtime_data.append(
await hass.async_add_executor_job(
partial(
HomeControl,
gateway_id=gateway_id,
gateway_id=str(gateway_id),
mydevolo_instance=mydevolo,
zeroconf_instance=zeroconf_instance,
)
)
)
except GatewayOfflineError:
offline_gateways += 1
_LOGGER.info("Central unit %s cannot be reached locally", gateway_id)
if len(gateway_ids) == offline_gateways:
except GatewayOfflineError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="connection_failed",
)
translation_placeholders={"gateway_id": gateway_id},
) from err

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

@@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/devolo_home_control",
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["HomeControl", "Mydevolo", "MprmRest", "MprmWebsocket", "Mprm"],
"loggers": ["devolo_home_control_api"],
"requirements": ["devolo-home-control-api==0.19.0"],
"zeroconf": ["_dvl-deviceapi._tcp.local."]
}
@@ -58,7 +58,7 @@
},
"exceptions": {
"connection_failed": {
"message": "Failed to connect to any devolo Home Control central unit."
"message": "Failed to connect to devolo Home Control central unit {gateway_id}."
},
"invalid_auth": {
"message": "Authentication failed. Please re-authenticate with your mydevolo account."

@@ -11,7 +11,7 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType

from .const import DOMAIN
from .services import async_setup_services
from .services import async_register_services

PLATFORMS = [Platform.NOTIFY, Platform.SENSOR]

@@ -20,7 +20,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the file component."""
async_setup_services(hass)
async_register_services(hass)
return True
@@ -6,29 +6,29 @@ import json
import voluptuous as vol
import yaml

from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse, callback
from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv

from .const import ATTR_FILE_ENCODING, ATTR_FILE_NAME, DOMAIN, SERVICE_READ_FILE

@callback
def async_setup_services(hass: HomeAssistant) -> None:
def async_register_services(hass: HomeAssistant) -> None:
"""Register services for File integration."""

hass.services.async_register(
DOMAIN,
SERVICE_READ_FILE,
read_file,
schema=vol.Schema(
{
vol.Required(ATTR_FILE_NAME): cv.string,
vol.Required(ATTR_FILE_ENCODING): cv.string,
}
),
supports_response=SupportsResponse.ONLY,
)
if not hass.services.has_service(DOMAIN, SERVICE_READ_FILE):
hass.services.async_register(
DOMAIN,
SERVICE_READ_FILE,
read_file,
schema=vol.Schema(
{
vol.Required(ATTR_FILE_NAME): cv.string,
vol.Required(ATTR_FILE_ENCODING): cv.string,
}
),
supports_response=SupportsResponse.ONLY,
)

ENCODING_LOADERS: dict[str, tuple[Callable, type[Exception]]] = {
@@ -37,7 +37,6 @@ class FoscamDeviceInfo:
supports_speak_volume_adjustment: bool
supports_pet_adjustment: bool
supports_car_adjustment: bool
supports_human_adjustment: bool
supports_wdr_adjustment: bool
supports_hdr_adjustment: bool

@@ -145,32 +144,24 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
if ret_sw == 0
else False
)
human_adjustment_val = (
bool(int(software_capabilities.get("swCapabilities2")) & 128)
if ret_sw == 0
else False
)
ret_md, motion_config_val = self.session.get_motion_detect_config()
ret_md, mothion_config_val = self.session.get_motion_detect_config()
if pet_adjustment_val:
is_pet_detection_on_val = (
motion_config_val.get("petEnable") == "1" if ret_md == 0 else False
mothion_config_val["petEnable"] == "1" if ret_md == 0 else False
)
else:
is_pet_detection_on_val = False

if car_adjustment_val:
is_car_detection_on_val = (
motion_config_val.get("carEnable") == "1" if ret_md == 0 else False
mothion_config_val["carEnable"] == "1" if ret_md == 0 else False
)
else:
is_car_detection_on_val = False

if human_adjustment_val:
is_human_detection_on_val = (
motion_config_val.get("humanEnable") == "1" if ret_md == 0 else False
)
else:
is_human_detection_on_val = False
is_human_detection_on_val = (
mothion_config_val["humanEnable"] == "1" if ret_md == 0 else False
)

return FoscamDeviceInfo(
dev_info=dev_info,
@@ -188,7 +179,6 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
supports_speak_volume_adjustment=supports_speak_volume_adjustment_val,
supports_pet_adjustment=pet_adjustment_val,
supports_car_adjustment=car_adjustment_val,
supports_human_adjustment=human_adjustment_val,
supports_hdr_adjustment=supports_hdr_adjustment_val,
supports_wdr_adjustment=supports_wdr_adjustment_val,
is_open_wdr=is_open_wdr,

@@ -143,7 +143,6 @@ SWITCH_DESCRIPTIONS: list[FoscamSwitchEntityDescription] = [
native_value_fn=lambda data: data.is_human_detection_on,
turn_off_fn=lambda session: set_motion_detection(session, "humanEnable", False),
turn_on_fn=lambda session: set_motion_detection(session, "humanEnable", True),
exists_fn=lambda coordinator: coordinator.data.supports_human_adjustment,
),
]
@@ -826,21 +826,6 @@ class AvmWrapper(FritzBoxTools):
NewDisallow="0" if turn_on else "1",
)

async def async_get_current_user_rights(self) -> dict[str, Any]:
"""Call X_AVM-DE_GetCurrentUser service."""

result = await self._async_service_call(
"LANConfigSecurity",
"1",
"X_AVM-DE_GetCurrentUser",
)

user_rights = xmltodict.parse(result["NewX_AVM-DE_CurrentUserRights"])["rights"]

return {
k: user_rights["access"][idx] for idx, k in enumerate(user_rights["path"])
}

async def async_wake_on_lan(self, mac_address: str) -> dict[str, Any]:
"""Call X_AVM-DE_WakeOnLANByMACAddress service."""

@@ -35,7 +35,6 @@ async def async_get_config_entry_diagnostics(
"last_update success": avm_wrapper.last_update_success,
"last_exception": avm_wrapper.last_exception,
"discovered_services": list(avm_wrapper.connection.services),
"current_user_rights": await avm_wrapper.async_get_current_user_rights(),
"client_devices": [
{
"connected_to": device.connected_to,

@@ -136,21 +136,6 @@ async def async_setup_entry(
new_data[CONF_URL] = url
hass.config_entries.async_update_entry(config_entry, data=new_data)

# Migrate legacy config entries without auth_type field
if CONF_AUTH_TYPE not in config:
new_data = dict(config_entry.data)
# Detect auth type based on which fields are present
if CONF_TOKEN in config:
new_data[CONF_AUTH_TYPE] = AUTH_API_TOKEN
elif CONF_USERNAME in config:
new_data[CONF_AUTH_TYPE] = AUTH_PASSWORD
else:
raise ConfigEntryError(
"Unable to determine authentication type from config entry."
)
hass.config_entries.async_update_entry(config_entry, data=new_data)
config = config_entry.data

# Determine API version
if config.get(CONF_AUTH_TYPE) == AUTH_API_TOKEN:
api_version = "v1"
@@ -213,14 +213,14 @@ class HomeAssistantConnectZBT2OptionsFlowHandler(
"""Instantiate options flow."""
super().__init__(*args, **kwargs)

self._usb_info = get_usb_service_info(self._config_entry)
self._usb_info = get_usb_service_info(self.config_entry)
self._hardware_name = HARDWARE_NAME
self._device = self._usb_info.device

self._probed_firmware_info = FirmwareInfo(
device=self._device,
firmware_type=ApplicationType(self._config_entry.data[FIRMWARE]),
firmware_version=self._config_entry.data[FIRMWARE_VERSION],
firmware_type=ApplicationType(self.config_entry.data[FIRMWARE]),
firmware_version=self.config_entry.data[FIRMWARE_VERSION],
source="guess",
owners=[],
)

@@ -28,7 +28,7 @@ from homeassistant.config_entries import (
OptionsFlow,
)
from homeassistant.core import callback
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.data_entry_flow import AbortFlow, progress_step
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.hassio import is_hassio
@@ -97,12 +97,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
self.addon_uninstall_task: asyncio.Task | None = None
self.firmware_install_task: asyncio.Task[None] | None = None
self.installing_firmware_name: str | None = None
self._install_otbr_addon_task: asyncio.Task[None] | None = None
self._start_otbr_addon_task: asyncio.Task[None] | None = None

# Progress flow steps cannot abort so we need to store the abort reason and then
# re-raise it in a dedicated step
self._progress_error: AbortFlow | None = None

def _get_translation_placeholders(self) -> dict[str, str]:
"""Shared translation placeholders."""
@@ -112,11 +106,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
if self._probed_firmware_info is not None
else "unknown"
),
"firmware_name": (
self.installing_firmware_name
if self.installing_firmware_name is not None
else "unknown"
),
"model": self._hardware_name,
}

@@ -193,22 +182,22 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
return self.async_show_progress(
step_id=step_id,
progress_action="install_firmware",
description_placeholders=self._get_translation_placeholders(),
description_placeholders={
**self._get_translation_placeholders(),
"firmware_name": firmware_name,
},
progress_task=self.firmware_install_task,
)

try:
await self.firmware_install_task
except AbortFlow as err:
self._progress_error = err
return self.async_show_progress_done(next_step_id="progress_failed")
return self.async_show_progress_done(
next_step_id=err.reason,
)
except HomeAssistantError:
_LOGGER.exception("Failed to flash firmware")
self._progress_error = AbortFlow(
reason="fw_install_failed",
description_placeholders=self._get_translation_placeholders(),
)
return self.async_show_progress_done(next_step_id="progress_failed")
return self.async_show_progress_done(next_step_id="firmware_install_failed")
finally:
self.firmware_install_task = None
@@ -252,10 +241,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
_LOGGER.debug("Skipping firmware upgrade due to index download failure")
return

raise AbortFlow(
reason="fw_download_failed",
description_placeholders=self._get_translation_placeholders(),
) from err
raise AbortFlow(reason="firmware_download_failed") from err

if not firmware_install_required:
assert self._probed_firmware_info is not None
@@ -284,10 +270,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
return

# Otherwise, fail
raise AbortFlow(
reason="fw_download_failed",
description_placeholders=self._get_translation_placeholders(),
) from err
raise AbortFlow(reason="firmware_download_failed") from err

self._probed_firmware_info = await async_flash_silabs_firmware(
hass=self.hass,
@@ -330,6 +313,41 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):

await otbr_manager.async_start_addon_waiting()

async def async_step_firmware_download_failed(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Abort when firmware download failed."""
assert self.installing_firmware_name is not None
return self.async_abort(
reason="fw_download_failed",
description_placeholders={
**self._get_translation_placeholders(),
"firmware_name": self.installing_firmware_name,
},
)

async def async_step_firmware_install_failed(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Abort when firmware install failed."""
assert self.installing_firmware_name is not None
return self.async_abort(
reason="fw_install_failed",
description_placeholders={
**self._get_translation_placeholders(),
"firmware_name": self.installing_firmware_name,
},
)

async def async_step_unsupported_firmware(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Abort when unsupported firmware is detected."""
return self.async_abort(
reason="unsupported_firmware",
description_placeholders=self._get_translation_placeholders(),
)

async def async_step_zigbee_installation_type(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -493,15 +511,16 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
"""Install Thread firmware."""
raise NotImplementedError

async def async_step_progress_failed(
@progress_step(
description_placeholders=lambda self: {
**self._get_translation_placeholders(),
"addon_name": get_otbr_addon_manager(self.hass).addon_name,
}
)
async def async_step_install_otbr_addon(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Abort when progress step failed."""
assert self._progress_error is not None
raise self._progress_error

async def _async_install_otbr_addon(self) -> None:
"""Do the work of installing the OTBR addon."""
"""Show progress dialog for installing the OTBR addon."""
addon_manager = get_otbr_addon_manager(self.hass)
addon_info = await self._async_get_addon_info(addon_manager)

@@ -519,39 +538,18 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
},
) from err

async def async_step_install_otbr_addon(
return await self.async_step_finish_thread_installation()

@progress_step(
description_placeholders=lambda self: {
**self._get_translation_placeholders(),
"addon_name": get_otbr_addon_manager(self.hass).addon_name,
}
)
async def async_step_start_otbr_addon(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Show progress dialog for installing the OTBR addon."""
if self._install_otbr_addon_task is None:
self._install_otbr_addon_task = self.hass.async_create_task(
self._async_install_otbr_addon(),
"Install OTBR addon",
)

if not self._install_otbr_addon_task.done():
return self.async_show_progress(
step_id="install_otbr_addon",
progress_action="install_otbr_addon",
description_placeholders={
**self._get_translation_placeholders(),
"addon_name": get_otbr_addon_manager(self.hass).addon_name,
},
progress_task=self._install_otbr_addon_task,
)

try:
await self._install_otbr_addon_task
except AbortFlow as err:
self._progress_error = err
return self.async_show_progress_done(next_step_id="progress_failed")
finally:
self._install_otbr_addon_task = None

return self.async_show_progress_done(next_step_id="finish_thread_installation")

async def _async_start_otbr_addon(self) -> None:
"""Do the work of starting the OTBR addon."""
"""Configure OTBR to point to the SkyConnect and run the addon."""
try:
await self._configure_and_start_otbr_addon()
except AddonError as err:
@@ -564,36 +562,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
},
) from err

async def async_step_start_otbr_addon(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Configure OTBR to point to the SkyConnect and run the addon."""
if self._start_otbr_addon_task is None:
self._start_otbr_addon_task = self.hass.async_create_task(
self._async_start_otbr_addon(),
"Start OTBR addon",
)

if not self._start_otbr_addon_task.done():
return self.async_show_progress(
step_id="start_otbr_addon",
progress_action="start_otbr_addon",
description_placeholders={
**self._get_translation_placeholders(),
"addon_name": get_otbr_addon_manager(self.hass).addon_name,
},
progress_task=self._start_otbr_addon_task,
)

try:
await self._start_otbr_addon_task
except AbortFlow as err:
self._progress_error = err
return self.async_show_progress_done(next_step_id="progress_failed")
finally:
self._start_otbr_addon_task = None

return self.async_show_progress_done(next_step_id="pre_confirm_otbr")
return await self.async_step_pre_confirm_otbr()

async def async_step_pre_confirm_otbr(
self, user_input: dict[str, Any] | None = None
@@ -275,17 +275,17 @@ class HomeAssistantSkyConnectOptionsFlowHandler(
"""Instantiate options flow."""
super().__init__(*args, **kwargs)

self._usb_info = get_usb_service_info(self._config_entry)
self._usb_info = get_usb_service_info(self.config_entry)
self._hw_variant = HardwareVariant.from_usb_product_name(
self._config_entry.data[PRODUCT]
self.config_entry.data[PRODUCT]
)
self._hardware_name = self._hw_variant.full_name
self._device = self._usb_info.device

self._probed_firmware_info = FirmwareInfo(
device=self._device,
firmware_type=ApplicationType(self._config_entry.data[FIRMWARE]),
firmware_version=self._config_entry.data[FIRMWARE_VERSION],
firmware_type=ApplicationType(self.config_entry.data[FIRMWARE]),
firmware_version=self.config_entry.data[FIRMWARE_VERSION],
source="guess",
owners=[],
)

@@ -348,7 +348,7 @@ class HomeAssistantYellowOptionsFlowHandler(

self._probed_firmware_info = FirmwareInfo(
device=self._device,
firmware_type=ApplicationType(self._config_entry.data["firmware"]),
firmware_type=ApplicationType(self.config_entry.data["firmware"]),
firmware_version=None,
source="guess",
owners=[],
@@ -108,7 +108,6 @@ _DEFAULT_BIND = ["0.0.0.0", "::"] if _HAS_IPV6 else ["0.0.0.0"]

HTTP_SCHEMA: Final = vol.All(
cv.deprecated(CONF_BASE_URL),
cv.deprecated(CONF_SERVER_HOST),  # Deprecated in HA Core 2025.12
vol.Schema(
{
vol.Optional(CONF_SERVER_HOST): vol.All(
@@ -209,21 +208,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
if conf is None:
conf = cast(ConfData, HTTP_SCHEMA({}))

if CONF_SERVER_HOST in conf:
if is_hassio(hass):
issue_id = "server_host_deprecated_hassio"
severity = ir.IssueSeverity.ERROR
else:
issue_id = "server_host_deprecated"
severity = ir.IssueSeverity.WARNING
if CONF_SERVER_HOST in conf and is_hassio(hass):
ir.async_create_issue(
hass,
DOMAIN,
issue_id,
breaks_in_ha_version="2026.6.0",
"server_host_may_break_hassio",
is_fixable=False,
severity=severity,
translation_key=issue_id,
severity=ir.IssueSeverity.ERROR,
translation_key="server_host_may_break_hassio",
)

server_host = conf.get(CONF_SERVER_HOST, _DEFAULT_BIND)

@@ -1,11 +1,7 @@
{
"issues": {
"server_host_deprecated": {
"description": "The `server_host` configuration option in the HTTP integration is deprecated and will be removed.\n\nIf you are using this option to bind Home Assistant to specific network interfaces, please remove it from your configuration. Home Assistant will automatically bind to all available interfaces by default.\n\nIf you have specific networking requirements, consider using firewall rules or other network configuration to control access to Home Assistant.",
"title": "The `server_host` HTTP configuration option is deprecated"
},
"server_host_deprecated_hassio": {
"description": "The deprecated `server_host` configuration option in the HTTP integration is prone to break the communication between Home Assistant Core and Supervisor, and will be removed.\n\nIf you are using this option to bind Home Assistant to specific network interfaces, please remove it from your configuration. Home Assistant will automatically bind to all available interfaces by default.\n\nIf you have specific networking requirements, consider using firewall rules or other network configuration to control access to Home Assistant.",
"server_host_may_break_hassio": {
"description": "The `server_host` configuration option in the HTTP integration is prone to break the communication between Home Assistant Core and Supervisor, and will be removed in a future release.\n\nIf you are using this option to bind Home Assistant to specific network interfaces, please remove it from your configuration. Home Assistant will automatically bind to all available interfaces by default.\n\nIf you have specific networking requirements, consider using firewall rules or other network configuration to control access to Home Assistant.",
"title": "The `server_host` HTTP configuration may break Home Assistant Core - Supervisor communication"
},
"ssl_configured_without_configured_urls": {
@@ -15,5 +15,5 @@
"iot_class": "local_polling",
"loggers": ["pynecil"],
"quality_scale": "platinum",
"requirements": ["pynecil==4.2.1"]
"requirements": ["pynecil==4.2.0"]
}

@@ -359,7 +359,7 @@ CLIMATE_KNX_SCHEMA = vol.Schema(
write=False, state_required=True, valid_dpt="9.001"
),
vol.Optional(CONF_GA_HUMIDITY_CURRENT): GASelector(
write=False, valid_dpt="9.007"
write=False, valid_dpt="9.002"
),
vol.Required(CONF_TARGET_TEMPERATURE): GroupSelect(
GroupSelectOption(

@@ -221,7 +221,7 @@ async def library_payload(hass):
for child in library_info.children:
child.thumbnail = "https://brands.home-assistant.io/_/kodi/logo.png"

with contextlib.suppress(BrowseError):
with contextlib.suppress(media_source.BrowseError):
item = await media_source.async_browse_media(
hass, None, content_filter=media_source_content_filter
)

@@ -25,7 +25,7 @@ from homeassistant.util import slugify

from .const import CONF_BASE_URL, DOMAIN, LOGGER
from .coordinator import MastodonConfigEntry, MastodonCoordinator, MastodonData
from .services import async_setup_services
from .services import setup_services
from .utils import construct_mastodon_username, create_mastodon_client

PLATFORMS: list[Platform] = [Platform.SENSOR]
@@ -35,7 +35,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Mastodon component."""
async_setup_services(hass)
setup_services(hass)
return True

@@ -10,7 +10,7 @@ import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
from homeassistant.core import HomeAssistant, ServiceCall, ServiceResponse, callback
from homeassistant.core import HomeAssistant, ServiceCall, ServiceResponse
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError

from .const import (
@@ -68,8 +68,7 @@ def async_get_entry(hass: HomeAssistant, config_entry_id: str) -> MastodonConfig
return cast(MastodonConfigEntry, entry)

@callback
def async_setup_services(hass: HomeAssistant) -> None:
def setup_services(hass: HomeAssistant) -> None:
"""Set up the services for the Mastodon integration."""

async def async_post(call: ServiceCall) -> ServiceResponse:
@@ -41,9 +41,6 @@
"energy_forecast": {
"default": "mdi:lightning-bolt-outline"
},
"finish": {
"default": "mdi:clock-end"
},
"plate": {
"default": "mdi:circle-outline",
"state": {
@@ -86,9 +83,6 @@
"spin_speed": {
"default": "mdi:sync"
},
"start": {
"default": "mdi:clock-start"
},
"start_time": {
"default": "mdi:clock-start"
},

@@ -4,7 +4,6 @@ from __future__ import annotations

from collections.abc import Callable, Mapping
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
from typing import Any, Final, cast

@@ -30,7 +29,6 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.util import dt as dt_util

from .const import (
COFFEE_SYSTEM_PROFILE,
@@ -104,47 +102,12 @@ def _get_coffee_profile(value: MieleDevice) -> str | None:
return None

def _convert_start_timestamp(
elapsed_time_list: list[int], start_time_list: list[int]
) -> datetime | None:
"""Convert raw values representing time into start timestamp."""
now = dt_util.utcnow()
elapsed_duration = _convert_duration(elapsed_time_list)
delayed_start_duration = _convert_duration(start_time_list)
if (elapsed_duration is None or elapsed_duration == 0) and (
delayed_start_duration is None or delayed_start_duration == 0
):
return None
if elapsed_duration is not None and elapsed_duration > 0:
duration = -elapsed_duration
elif delayed_start_duration is not None and delayed_start_duration > 0:
duration = delayed_start_duration
delta = timedelta(minutes=duration)
return (now + delta).replace(second=0, microsecond=0)

def _convert_finish_timestamp(
remaining_time_list: list[int], start_time_list: list[int]
) -> datetime | None:
"""Convert raw values representing time into finish timestamp."""
now = dt_util.utcnow()
program_duration = _convert_duration(remaining_time_list)
delayed_start_duration = _convert_duration(start_time_list)
if program_duration is None or program_duration == 0:
return None
duration = program_duration + (
delayed_start_duration if delayed_start_duration is not None else 0
)
delta = timedelta(minutes=duration)
return (now + delta).replace(second=0, microsecond=0)

@dataclass(frozen=True, kw_only=True)
class MieleSensorDescription(SensorEntityDescription):
"""Class describing Miele sensor entities."""

value_fn: Callable[[MieleDevice], StateType | datetime]
end_value_fn: Callable[[StateType | datetime], StateType | datetime] | None = None
value_fn: Callable[[MieleDevice], StateType]
end_value_fn: Callable[[StateType], StateType] | None = None
extra_attributes: dict[str, Callable[[MieleDevice], StateType]] | None = None
zone: int | None = None
unique_id_fn: Callable[[str, MieleSensorDescription], str] | None = None
@@ -465,60 +428,6 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
suggested_unit_of_measurement=UnitOfTime.HOURS,
),
),
MieleSensorDefinition(
types=(
MieleAppliance.WASHING_MACHINE,
MieleAppliance.WASHING_MACHINE_SEMI_PROFESSIONAL,
MieleAppliance.TUMBLE_DRYER,
MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL,
MieleAppliance.DISHWASHER,
MieleAppliance.OVEN,
MieleAppliance.OVEN_MICROWAVE,
MieleAppliance.STEAM_OVEN,
MieleAppliance.MICROWAVE,
MieleAppliance.ROBOT_VACUUM_CLEANER,
MieleAppliance.WASHER_DRYER,
MieleAppliance.STEAM_OVEN_COMBI,
MieleAppliance.STEAM_OVEN_MICRO,
MieleAppliance.DIALOG_OVEN,
MieleAppliance.STEAM_OVEN_MK2,
),
description=MieleSensorDescription(
key="state_finish_timestamp",
translation_key="finish",
value_fn=lambda value: _convert_finish_timestamp(
value.state_remaining_time, value.state_start_time
),
device_class=SensorDeviceClass.TIMESTAMP,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
MieleSensorDefinition(
types=(
MieleAppliance.WASHING_MACHINE,
MieleAppliance.TUMBLE_DRYER,
MieleAppliance.DISHWASHER,
MieleAppliance.OVEN,
MieleAppliance.OVEN_MICROWAVE,
MieleAppliance.STEAM_OVEN,
MieleAppliance.MICROWAVE,
MieleAppliance.WASHER_DRYER,
MieleAppliance.STEAM_OVEN_COMBI,
MieleAppliance.STEAM_OVEN_MICRO,
MieleAppliance.DIALOG_OVEN,
MieleAppliance.ROBOT_VACUUM_CLEANER,
MieleAppliance.STEAM_OVEN_MK2,
),
description=MieleSensorDescription(
key="state_start_timestamp",
translation_key="start",
value_fn=lambda value: _convert_start_timestamp(
value.state_elapsed_time, value.state_start_time
),
device_class=SensorDeviceClass.TIMESTAMP,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
MieleSensorDefinition(
types=(
MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL,
@@ -711,8 +620,6 @@ async def async_setup_entry(
"state_elapsed_time": MieleTimeSensor,
"state_remaining_time": MieleTimeSensor,
"state_start_time": MieleTimeSensor,
"state_start_timestamp": MieleAbsoluteTimeSensor,
"state_finish_timestamp": MieleAbsoluteTimeSensor,
"current_energy_consumption": MieleConsumptionSensor,
"current_water_consumption": MieleConsumptionSensor,
}.get(definition.description.key, MieleSensor)
@@ -836,7 +743,7 @@ class MieleSensor(MieleEntity, SensorEntity):
self._attr_unique_id = description.unique_id_fn(device_id, description)

@property
def native_value(self) -> StateType | datetime:
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.device)

@@ -854,7 +761,7 @@ class MieleSensor(MieleEntity, SensorEntity):
class MieleRestorableSensor(MieleSensor, RestoreSensor):
"""Representation of a Sensor whose internal state can be restored."""

_attr_native_value: StateType | datetime
_attr_native_value: StateType

async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
@@ -866,7 +773,7 @@ class MieleRestorableSensor(MieleSensor, RestoreSensor):
self._attr_native_value = last_data.native_value  # type: ignore[assignment]

@property
def native_value(self) -> StateType | datetime:
def native_value(self) -> StateType:
"""Return the state of the sensor.

It is necessary to override `native_value` to fall back to the default
@@ -1027,40 +934,6 @@ class MieleTimeSensor(MieleRestorableSensor):
self._attr_native_value = current_value

class MieleAbsoluteTimeSensor(MieleRestorableSensor):
"""Representation of absolute time sensors handling precision correctness."""

_previous_value: StateType | datetime = None

def _update_native_value(self) -> None:
"""Update the last value of the sensor."""
current_value = self.entity_description.value_fn(self.device)
current_status = StateStatus(self.device.state_status)

# The API reports with minute precision, to avoid changing
# the value too often, we keep the cached value if it differs
# less than 90s from the new value
if (
isinstance(self._previous_value, datetime)
and isinstance(current_value, datetime)
and (
self._previous_value - timedelta(seconds=90)
< current_value
< self._previous_value + timedelta(seconds=90)
)
) or current_status == StateStatus.PROGRAM_ENDED:
return

# force unknown when appliance is not working (some devices are keeping last value until a new cycle starts)
if current_status in (StateStatus.OFF, StateStatus.ON, StateStatus.IDLE):
self._attr_native_value = None

# otherwise, cache value and return it
else:
self._attr_native_value = current_value
self._previous_value = current_value

class MieleConsumptionSensor(MieleRestorableSensor):
"""Representation of consumption sensors keeping state from cache."""
@@ -1070,19 +943,13 @@ class MieleConsumptionSensor(MieleRestorableSensor):
"""Update the last value of the sensor."""
current_value = self.entity_description.value_fn(self.device)
current_status = StateStatus(self.device.state_status)
# Guard for corrupt restored value
restored_value = (
self._attr_native_value
if isinstance(self._attr_native_value, (int, float))
else 0
)
last_value = (
float(cast(str, restored_value))
float(cast(str, self._attr_native_value))
if self._attr_native_value is not None
else 0
)

# Force unknown when appliance is not able to report consumption
# force unknown when appliance is not able to report consumption
if current_status in (
StateStatus.ON,
StateStatus.OFF,

@@ -216,9 +216,6 @@
"energy_forecast": {
"name": "Energy forecast"
},
"finish": {
"name": "Finish"
},
"plate": {
"name": "Plate {plate_no}",
"state": {
@@ -1018,9 +1015,6 @@
"spin_speed": {
"name": "Spin speed"
},
"start": {
"name": "Start"
},
"start_time": {
"name": "Start in"
},
@@ -1,11 +0,0 @@
"""Constants for the NOAA Tides integration."""

from datetime import timedelta

CONF_STATION_ID = "station_id"

DEFAULT_NAME = "NOAA Tides"
DEFAULT_PREDICTION_LENGTH = timedelta(days=2)
DEFAULT_TIMEZONE = "lst_ldt"

ATTRIBUTION = "Data provided by NOAA"
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, Literal, TypedDict
|
||||
|
||||
@@ -22,13 +22,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||
|
||||
from .const import (
|
||||
ATTRIBUTION,
|
||||
CONF_STATION_ID,
|
||||
DEFAULT_NAME,
|
||||
DEFAULT_PREDICTION_LENGTH,
|
||||
DEFAULT_TIMEZONE,
|
||||
)
|
||||
from .helpers import get_station_unique_id
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -36,6 +29,13 @@ if TYPE_CHECKING:
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_STATION_ID = "station_id"
|
||||
|
||||
DEFAULT_NAME = "NOAA Tides"
|
||||
DEFAULT_TIMEZONE = "lst_ldt"
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=60)
|
||||
|
||||
TIMEZONES = ["gmt", "lst", "lst_ldt"]
|
||||
UNIT_SYSTEMS = ["english", "metric"]
|
||||
|
||||
@@ -63,9 +63,9 @@ def setup_platform(
|
||||
if CONF_UNIT_SYSTEM in config:
|
||||
unit_system = config[CONF_UNIT_SYSTEM]
|
||||
elif hass.config.units is METRIC_SYSTEM:
|
||||
unit_system = "metric"
|
||||
unit_system = UNIT_SYSTEMS[1]
|
||||
else:
|
||||
unit_system = "english"
|
||||
unit_system = UNIT_SYSTEMS[0]
|
||||
|
||||
try:
|
||||
station = coops.Station(station_id, unit_system)
|
||||
@@ -97,7 +97,7 @@ class NOAATidesData(TypedDict):
|
||||
class NOAATidesAndCurrentsSensor(SensorEntity):
|
||||
"""Representation of a NOAA Tides and Currents sensor."""
|
||||
|
||||
_attr_attribution = ATTRIBUTION
|
||||
_attr_attribution = "Data provided by NOAA"
|
||||
|
||||
def __init__(self, name, station_id, timezone, unit_system, station) -> None:
|
||||
"""Initialize the sensor."""
|
||||
@@ -141,8 +141,8 @@ class NOAATidesAndCurrentsSensor(SensorEntity):
|
||||
return attr
|
||||
|
||||
@property
|
||||
def native_value(self) -> str | None:
|
||||
"""Return the state."""
|
||||
def native_value(self):
|
||||
"""Return the state of the device."""
|
||||
if self.data is None:
|
||||
return None
|
||||
api_time = self.data["time_stamp"][0]
|
||||
@@ -157,7 +157,8 @@ class NOAATidesAndCurrentsSensor(SensorEntity):
|
||||
def update(self) -> None:
|
||||
"""Get the latest data from NOAA Tides and Currents API."""
|
||||
begin = datetime.now()
|
||||
end = begin + DEFAULT_PREDICTION_LENGTH
|
||||
delta = timedelta(days=2)
|
||||
end = begin + delta
|
||||
try:
|
||||
df_predictions = self._station.get_data(
|
||||
begin_date=begin.strftime("%Y%m%d %H:%M"),
|
||||
|
||||
@@ -10,5 +10,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["onedrive_personal_sdk"],
  "quality_scale": "platinum",
  "requirements": ["onedrive-personal-sdk==0.0.16"]
  "requirements": ["onedrive-personal-sdk==0.0.15"]
}
|
||||
|
||||
@@ -208,20 +208,6 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
OverkizSensorDescription(
|
||||
key=OverkizState.IO_POWER_HEAT_PUMP,
|
||||
name="Heat pump power consumption",
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
OverkizSensorDescription(
|
||||
key=OverkizState.IO_POWER_HEAT_ELECTRICAL,
|
||||
name="Electric power consumption",
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
OverkizSensorDescription(
|
||||
key=OverkizState.CORE_CONSUMPTION_TARIFF1,
|
||||
name="Consumption tariff 1",
|
||||
|
||||
@@ -8,6 +8,6 @@
  "iot_class": "local_polling",
  "loggers": ["plugwise"],
  "quality_scale": "platinum",
  "requirements": ["plugwise==1.9.0"],
  "requirements": ["plugwise==1.8.3"],
  "zeroconf": ["_plugwise._tcp.local."]
}
|
||||
|
||||
@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "quality_scale": "bronze",
  "requirements": ["pyportainer==1.0.14"]
  "requirements": ["pyportainer==1.0.13"]
}
|
||||
|
||||
@@ -295,6 +295,10 @@ def async_setup_entry_rest(
|
||||
class BlockEntityDescription(EntityDescription):
|
||||
"""Class to describe a BLOCK entity."""
|
||||
|
||||
# BlockEntity does not support UNDEFINED or None,
|
||||
# restrict the type to str.
|
||||
name: str = ""
|
||||
|
||||
unit_fn: Callable[[dict], str] | None = None
|
||||
value: Callable[[Any], Any] = lambda val: val
|
||||
available: Callable[[Block], bool] | None = None
|
||||
@@ -307,6 +311,10 @@ class BlockEntityDescription(EntityDescription):
|
||||
class RpcEntityDescription(EntityDescription):
|
||||
"""Class to describe a RPC entity."""
|
||||
|
||||
# BlockEntity does not support UNDEFINED or None,
|
||||
# restrict the type to str.
|
||||
name: str = ""
|
||||
|
||||
sub_key: str | None = None
|
||||
|
||||
value: Callable[[Any, Any], Any] | None = None
|
||||
@@ -324,6 +332,10 @@ class RpcEntityDescription(EntityDescription):
|
||||
class RestEntityDescription(EntityDescription):
|
||||
"""Class to describe a REST entity."""
|
||||
|
||||
# BlockEntity does not support UNDEFINED or None,
|
||||
# restrict the type to str.
|
||||
name: str = ""
|
||||
|
||||
value: Callable[[dict, Any], Any] | None = None
|
||||
|
||||
|
||||
|
||||
@@ -26,9 +26,6 @@
|
||||
"detected_objects": {
|
||||
"default": "mdi:account-group"
|
||||
},
|
||||
"detected_objects_with_channel_name": {
|
||||
"default": "mdi:account-group"
|
||||
},
|
||||
"gas_concentration": {
|
||||
"default": "mdi:gauge"
|
||||
},
|
||||
@@ -41,21 +38,9 @@
|
||||
"lamp_life": {
|
||||
"default": "mdi:progress-wrench"
|
||||
},
|
||||
"left_slot_level": {
|
||||
"default": "mdi:bottle-tonic-outline"
|
||||
},
|
||||
"left_slot_vial": {
|
||||
"default": "mdi:scent"
|
||||
},
|
||||
"operation": {
|
||||
"default": "mdi:cog-transfer"
|
||||
},
|
||||
"right_slot_level": {
|
||||
"default": "mdi:bottle-tonic-outline"
|
||||
},
|
||||
"right_slot_vial": {
|
||||
"default": "mdi:scent"
|
||||
},
|
||||
"self_test": {
|
||||
"default": "mdi:progress-wrench"
|
||||
},
|
||||
@@ -67,6 +52,12 @@
|
||||
},
|
||||
"valve_status": {
|
||||
"default": "mdi:valve"
|
||||
},
|
||||
"vial_level": {
|
||||
"default": "mdi:bottle-tonic-outline"
|
||||
},
|
||||
"vial_name": {
|
||||
"default": "mdi:scent"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
|
||||
File diff suppressed because it is too large
@@ -163,29 +163,7 @@
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"adc": {
|
||||
"name": "ADC"
|
||||
},
|
||||
"analog": {
|
||||
"name": "Analog"
|
||||
},
|
||||
"analog_value": {
|
||||
"name": "Analog value"
|
||||
},
|
||||
"analog_value_with_channel_name": {
|
||||
"name": "{channel_name} analog value"
|
||||
},
|
||||
"analog_with_channel_name": {
|
||||
"name": "{channel_name} analog"
|
||||
},
|
||||
"apparent_power_with_channel_name": {
|
||||
"name": "{channel_name} apparent power"
|
||||
},
|
||||
"average_temperature": {
|
||||
"name": "Average temperature"
|
||||
},
|
||||
"charger_state": {
|
||||
"name": "Charger state",
|
||||
"state": {
|
||||
"charger_charging": "[%key:common::state::charging%]",
|
||||
"charger_end": "Charge completed",
|
||||
@@ -197,43 +175,10 @@
|
||||
"charger_wait": "Charging paused by vehicle"
|
||||
}
|
||||
},
|
||||
"current_with_channel_name": {
|
||||
"name": "{channel_name} current"
|
||||
},
|
||||
"detected_objects": {
|
||||
"name": "Detected objects",
|
||||
"unit_of_measurement": "objects"
|
||||
},
|
||||
"detected_objects_with_channel_name": {
|
||||
"name": "{channel_name} detected objects",
|
||||
"unit_of_measurement": "objects"
|
||||
},
|
||||
"device_temperature": {
|
||||
"name": "Device temperature"
|
||||
},
|
||||
"energy_consumed": {
|
||||
"name": "Energy consumed"
|
||||
},
|
||||
"energy_consumed_with_channel_name": {
|
||||
"name": "{channel_name} energy consumed"
|
||||
},
|
||||
"energy_returned": {
|
||||
"name": "Energy returned"
|
||||
},
|
||||
"energy_returned_with_channel_name": {
|
||||
"name": "{channel_name} energy returned"
|
||||
},
|
||||
"energy_with_channel_name": {
|
||||
"name": "{channel_name} energy"
|
||||
},
|
||||
"frequency_with_channel_name": {
|
||||
"name": "{channel_name} frequency"
|
||||
},
|
||||
"gas_concentration": {
|
||||
"name": "Gas concentration"
|
||||
},
|
||||
"gas_detected": {
|
||||
"name": "Gas detected",
|
||||
"state": {
|
||||
"heavy": "Heavy",
|
||||
"mild": "Mild",
|
||||
@@ -251,81 +196,21 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"humidity_with_channel_name": {
|
||||
"name": "{channel_name} humidity"
|
||||
},
|
||||
"illuminance_level": {
|
||||
"name": "Illuminance level",
|
||||
"state": {
|
||||
"bright": "Bright",
|
||||
"dark": "Dark",
|
||||
"twilight": "Twilight"
|
||||
}
|
||||
},
|
||||
"lamp_life": {
|
||||
"name": "Lamp life"
|
||||
},
|
||||
"last_restart": {
|
||||
"name": "Last restart"
|
||||
},
|
||||
"left_slot_level": {
|
||||
"name": "Left slot level"
|
||||
},
|
||||
"left_slot_vial": {
|
||||
"name": "Left slot vial"
|
||||
},
|
||||
"neutral_current": {
|
||||
"name": "Neutral current"
|
||||
},
|
||||
"operation": {
|
||||
"name": "Operation",
|
||||
"state": {
|
||||
"fault": "[%key:common::state::fault%]",
|
||||
"normal": "[%key:common::state::normal%]",
|
||||
"warmup": "Warm-up"
|
||||
}
|
||||
},
|
||||
"power_factor_with_channel_name": {
|
||||
"name": "{channel_name} power factor"
|
||||
},
|
||||
"power_with_channel_name": {
|
||||
"name": "{channel_name} power"
|
||||
},
|
||||
"pulse_counter": {
|
||||
"name": "Pulse counter"
|
||||
},
|
||||
"pulse_counter_frequency": {
|
||||
"name": "Pulse counter frequency"
|
||||
},
|
||||
"pulse_counter_frequency_value": {
|
||||
"name": "Pulse counter frequency value"
|
||||
},
|
||||
"pulse_counter_frequency_value_with_channel_name": {
|
||||
"name": "{channel_name} pulse counter frequency value"
|
||||
},
|
||||
"pulse_counter_frequency_with_channel_name": {
|
||||
"name": "{channel_name} pulse counter frequency"
|
||||
},
|
||||
"pulse_counter_value": {
|
||||
"name": "Pulse counter value"
|
||||
},
|
||||
"pulse_counter_value_with_channel_name": {
|
||||
"name": "{channel_name} Pulse counter value"
|
||||
},
|
||||
"pulse_counter_with_channel_name": {
|
||||
"name": "{channel_name} pulse counter"
|
||||
},
|
||||
"rainfall_last_24h": {
|
||||
"name": "Rainfall last 24h"
|
||||
},
|
||||
"right_slot_level": {
|
||||
"name": "Right slot level"
|
||||
},
|
||||
"right_slot_vial": {
|
||||
"name": "Right slot vial"
|
||||
},
|
||||
"self_test": {
|
||||
"name": "Self test",
|
||||
"state": {
|
||||
"completed": "Completed",
|
||||
"not_completed": "Not completed",
|
||||
@@ -343,23 +228,7 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"session_duration": {
|
||||
"name": "Session duration"
|
||||
},
|
||||
"session_energy": {
|
||||
"name": "Session energy"
|
||||
},
|
||||
"temperature_with_channel_name": {
|
||||
"name": "{channel_name} temperature"
|
||||
},
|
||||
"tilt": {
|
||||
"name": "Tilt"
|
||||
},
|
||||
"valve_position": {
|
||||
"name": "Valve position"
|
||||
},
|
||||
"valve_status": {
|
||||
"name": "Valve status",
|
||||
"state": {
|
||||
"checking": "Checking",
|
||||
"closed": "[%key:common::state::closed%]",
|
||||
@@ -368,30 +237,6 @@
|
||||
"opened": "Opened",
|
||||
"opening": "[%key:common::state::opening%]"
|
||||
}
|
||||
},
|
||||
"voltage_with_channel_name": {
|
||||
"name": "{channel_name} voltage"
|
||||
},
|
||||
"voltage_with_phase_name": {
|
||||
"name": "Phase {phase_name} voltage"
|
||||
},
|
||||
"voltmeter": {
|
||||
"name": "Voltmeter"
|
||||
},
|
||||
"voltmeter_value": {
|
||||
"name": "Voltmeter value"
|
||||
},
|
||||
"water_consumption": {
|
||||
"name": "Water consumption"
|
||||
},
|
||||
"water_flow_rate": {
|
||||
"name": "Water flow rate"
|
||||
},
|
||||
"water_pressure": {
|
||||
"name": "Water pressure"
|
||||
},
|
||||
"water_temperature": {
|
||||
"name": "Water temperature"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -49,7 +49,6 @@ from homeassistant.helpers.device_registry import (
|
||||
DeviceInfo,
|
||||
)
|
||||
from homeassistant.helpers.network import NoURLAvailableError, get_url
|
||||
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
from .const import (
|
||||
@@ -120,12 +119,12 @@ def get_number_of_channels(device: BlockDevice, block: Block) -> int:
|
||||
def get_block_entity_name(
|
||||
device: BlockDevice,
|
||||
block: Block | None,
|
||||
description: str | UndefinedType | None = None,
|
||||
description: str | None = None,
|
||||
) -> str | None:
|
||||
"""Naming for block based switch and sensors."""
|
||||
channel_name = get_block_channel_name(device, block)
|
||||
|
||||
if description is not UNDEFINED and description:
|
||||
if description:
|
||||
return f"{channel_name} {description.lower()}" if channel_name else description
|
||||
|
||||
return channel_name
|
||||
@@ -443,15 +442,12 @@ def get_rpc_sub_device_name(
|
||||
|
||||
|
||||
def get_rpc_entity_name(
|
||||
device: RpcDevice,
|
||||
key: str,
|
||||
name: str | UndefinedType | None = None,
|
||||
role: str | None = None,
|
||||
device: RpcDevice, key: str, name: str | None = None, role: str | None = None
|
||||
) -> str | None:
|
||||
"""Naming for RPC based switch and sensors."""
|
||||
channel_name = get_rpc_channel_name(device, key)
|
||||
|
||||
if name is not UNDEFINED and name:
|
||||
if name:
|
||||
if role and role != ROLE_GENERIC:
|
||||
return name
|
||||
return f"{channel_name} {name.lower()}" if channel_name else name
|
||||
|
||||
@@ -45,8 +45,8 @@
        "name": "Fuel drying",
        "state": {
          "dry": "Dry",
          "extremely_dry": "Extremely dry",
          "moderate_wet": "Moderately wet",
          "extremely_dry": "Extemely dry",
          "moderate_wet": "Moderate wet",
          "very_dry": "Very dry",
          "very_wet": "Very wet",
          "wet": "Wet"
|
||||
|
||||
@@ -57,7 +57,4 @@ async def async_setup_entry(hass: HomeAssistant, entry: SolarEdgeConfigEntry) ->
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: SolarEdgeConfigEntry) -> bool:
|
||||
"""Unload SolarEdge config entry."""
|
||||
if DATA_API_CLIENT not in entry.runtime_data:
|
||||
return True # Nothing to unload
|
||||
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
@@ -133,11 +133,8 @@ class SolarEdgeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
if api_key_ok and web_login_ok:
|
||||
data = {CONF_SITE_ID: site_id}
|
||||
if api_key:
|
||||
data[CONF_API_KEY] = api_key
|
||||
if username:
|
||||
data[CONF_USERNAME] = username
|
||||
data[CONF_PASSWORD] = web_auth[CONF_PASSWORD]
|
||||
data.update(api_auth)
|
||||
data.update(web_auth)
|
||||
|
||||
if self.source == SOURCE_RECONFIGURE:
|
||||
if TYPE_CHECKING:
|
||||
|
||||
@@ -49,9 +49,7 @@ QUERY_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_COLUMN_NAME): cv.string,
|
||||
vol.Required(CONF_NAME): cv.template,
|
||||
vol.Required(CONF_QUERY): vol.All(
|
||||
cv.template, ValueTemplate.from_template, validate_sql_select
|
||||
),
|
||||
vol.Required(CONF_QUERY): vol.All(cv.string, validate_sql_select),
|
||||
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
|
||||
vol.Optional(CONF_VALUE_TEMPLATE): vol.All(
|
||||
cv.template, ValueTemplate.from_template
|
||||
|
||||
@@ -9,6 +9,8 @@ import sqlalchemy
|
||||
from sqlalchemy.engine import Engine, Result
|
||||
from sqlalchemy.exc import MultipleResultsFound, NoSuchColumnError, SQLAlchemyError
|
||||
from sqlalchemy.orm import Session, scoped_session, sessionmaker
|
||||
import sqlparse
|
||||
from sqlparse.exceptions import SQLParseError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.recorder import CONF_DB_URL, get_instance
|
||||
@@ -29,28 +31,21 @@ from homeassistant.const import (
|
||||
CONF_UNIT_OF_MEASUREMENT,
|
||||
CONF_VALUE_TEMPLATE,
|
||||
)
|
||||
from homeassistant.core import async_get_hass, callback
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import section
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import selector
|
||||
|
||||
from .const import CONF_ADVANCED_OPTIONS, CONF_COLUMN_NAME, CONF_QUERY, DOMAIN
|
||||
from .util import (
|
||||
EmptyQueryError,
|
||||
InvalidSqlQuery,
|
||||
MultipleQueryError,
|
||||
NotSelectQueryError,
|
||||
UnknownQueryTypeError,
|
||||
check_and_render_sql_query,
|
||||
resolve_db_url,
|
||||
)
|
||||
from .util import resolve_db_url
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
OPTIONS_SCHEMA: vol.Schema = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_QUERY): selector.TemplateSelector(),
|
||||
vol.Required(CONF_QUERY): selector.TextSelector(
|
||||
selector.TextSelectorConfig(multiline=True)
|
||||
),
|
||||
vol.Required(CONF_COLUMN_NAME): selector.TextSelector(),
|
||||
vol.Required(CONF_ADVANCED_OPTIONS): section(
|
||||
vol.Schema(
|
||||
@@ -94,12 +89,14 @@ CONFIG_SCHEMA: vol.Schema = vol.Schema(
|
||||
|
||||
def validate_sql_select(value: str) -> str:
|
||||
"""Validate that value is a SQL SELECT query."""
|
||||
hass = async_get_hass()
|
||||
try:
|
||||
return check_and_render_sql_query(hass, value)
|
||||
except (TemplateError, InvalidSqlQuery) as err:
|
||||
_LOGGER.debug("Invalid query '%s' results in '%s'", value, err.args[0])
|
||||
raise
|
||||
if len(query := sqlparse.parse(value.lstrip().lstrip(";"))) > 1:
|
||||
raise MultipleResultsFound
|
||||
if len(query) == 0 or (query_type := query[0].get_type()) == "UNKNOWN":
|
||||
raise ValueError
|
||||
if query_type != "SELECT":
|
||||
_LOGGER.debug("The SQL query %s is of type %s", query, query_type)
|
||||
raise SQLParseError
|
||||
return str(query[0])
|
||||
|
||||
|
||||
def validate_db_connection(db_url: str) -> bool:
|
||||
@@ -141,7 +138,7 @@ def validate_query(db_url: str, query: str, column: str) -> bool:
|
||||
if sess:
|
||||
sess.close()
|
||||
engine.dispose()
|
||||
raise InvalidSqlQuery from error
|
||||
raise ValueError(error) from error
|
||||
|
||||
for res in result.mappings():
|
||||
if column not in res:
|
||||
@@ -227,13 +224,13 @@ class SQLConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
except NoSuchColumnError:
|
||||
errors["column"] = "column_invalid"
|
||||
description_placeholders = {"column": column}
|
||||
except (MultipleResultsFound, MultipleQueryError):
|
||||
except MultipleResultsFound:
|
||||
errors["query"] = "multiple_queries"
|
||||
except SQLAlchemyError:
|
||||
errors["db_url"] = "db_url_invalid"
|
||||
except (NotSelectQueryError, UnknownQueryTypeError):
|
||||
except SQLParseError:
|
||||
errors["query"] = "query_no_read_only"
|
||||
except (TemplateError, EmptyQueryError, InvalidSqlQuery) as err:
|
||||
except ValueError as err:
|
||||
_LOGGER.debug("Invalid query: %s", err)
|
||||
errors["query"] = "query_invalid"
|
||||
|
||||
@@ -285,13 +282,13 @@ class SQLOptionsFlowHandler(OptionsFlowWithReload):
|
||||
except NoSuchColumnError:
|
||||
errors["column"] = "column_invalid"
|
||||
description_placeholders = {"column": column}
|
||||
except (MultipleResultsFound, MultipleQueryError):
|
||||
except MultipleResultsFound:
|
||||
errors["query"] = "multiple_queries"
|
||||
except SQLAlchemyError:
|
||||
errors["db_url"] = "db_url_invalid"
|
||||
except (NotSelectQueryError, UnknownQueryTypeError):
|
||||
except SQLParseError:
|
||||
errors["query"] = "query_no_read_only"
|
||||
except (TemplateError, EmptyQueryError, InvalidSqlQuery) as err:
|
||||
except ValueError as err:
|
||||
_LOGGER.debug("Invalid query: %s", err)
|
||||
errors["query"] = "query_invalid"
|
||||
else:
|
||||
|
||||
@@ -22,7 +22,7 @@ from homeassistant.const import (
|
||||
MATCH_ALL,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import PlatformNotReady, TemplateError
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
@@ -40,9 +40,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import CONF_ADVANCED_OPTIONS, CONF_COLUMN_NAME, CONF_QUERY, DOMAIN
|
||||
from .util import (
|
||||
InvalidSqlQuery,
|
||||
async_create_sessionmaker,
|
||||
check_and_render_sql_query,
|
||||
convert_value,
|
||||
generate_lambda_stmt,
|
||||
redact_credentials,
|
||||
@@ -83,7 +81,7 @@ async def async_setup_platform(
|
||||
return
|
||||
|
||||
name: Template = conf[CONF_NAME]
|
||||
query_template: ValueTemplate = conf[CONF_QUERY]
|
||||
query_str: str = conf[CONF_QUERY]
|
||||
value_template: ValueTemplate | None = conf.get(CONF_VALUE_TEMPLATE)
|
||||
column_name: str = conf[CONF_COLUMN_NAME]
|
||||
unique_id: str | None = conf.get(CONF_UNIQUE_ID)
|
||||
@@ -98,7 +96,7 @@ async def async_setup_platform(
|
||||
await async_setup_sensor(
|
||||
hass,
|
||||
trigger_entity_config,
|
||||
query_template,
|
||||
query_str,
|
||||
column_name,
|
||||
value_template,
|
||||
unique_id,
|
||||
@@ -121,13 +119,6 @@ async def async_setup_entry(
|
||||
template: str | None = entry.options[CONF_ADVANCED_OPTIONS].get(CONF_VALUE_TEMPLATE)
|
||||
column_name: str = entry.options[CONF_COLUMN_NAME]
|
||||
|
||||
query_template: ValueTemplate | None = None
|
||||
try:
|
||||
query_template = ValueTemplate(query_str, hass)
|
||||
query_template.ensure_valid()
|
||||
except TemplateError as err:
|
||||
raise PlatformNotReady("Invalid SQL query template") from err
|
||||
|
||||
value_template: ValueTemplate | None = None
|
||||
if template is not None:
|
||||
try:
|
||||
@@ -146,7 +137,7 @@ async def async_setup_entry(
|
||||
await async_setup_sensor(
|
||||
hass,
|
||||
trigger_entity_config,
|
||||
query_template,
|
||||
query_str,
|
||||
column_name,
|
||||
value_template,
|
||||
entry.entry_id,
|
||||
@@ -159,7 +150,7 @@ async def async_setup_entry(
|
||||
async def async_setup_sensor(
|
||||
hass: HomeAssistant,
|
||||
trigger_entity_config: ConfigType,
|
||||
query_template: ValueTemplate,
|
||||
query_str: str,
|
||||
column_name: str,
|
||||
value_template: ValueTemplate | None,
|
||||
unique_id: str | None,
|
||||
@@ -175,25 +166,22 @@ async def async_setup_sensor(
    ) = await async_create_sessionmaker(hass, db_url)
    if sessmaker is None:
        return
    validate_query(hass, query_template, uses_recorder_db, unique_id)
    validate_query(hass, query_str, uses_recorder_db, unique_id)

    query_str = check_and_render_sql_query(hass, query_template)
    upper_query = query_str.upper()
    # MSSQL uses TOP and not LIMIT
    mod_query_template = query_template
    if not ("LIMIT" in upper_query or "SELECT TOP" in upper_query):
        if "mssql" in db_url:
            _query = query_template.template.replace("SELECT", "SELECT TOP 1")
            query_str = upper_query.replace("SELECT", "SELECT TOP 1")
        else:
            _query = query_template.template.replace(";", "") + " LIMIT 1;"
            mod_query_template = ValueTemplate(_query, hass)
            query_str = query_str.replace(";", "") + " LIMIT 1;"
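# Editor's note: a hedged, standalone sketch of the single-row limiting shown above, using
# plain strings so it runs outside Home Assistant. The dialect check on the database URL
# mirrors the idea only, not the integration's exact code path.
def limit_to_single_row(query: str, db_url: str) -> str:
    """Append LIMIT 1, or rewrite to SELECT TOP 1 for MSSQL, unless already limited."""
    upper_query = query.upper()
    if "LIMIT" in upper_query or "SELECT TOP" in upper_query:
        return query
    if "mssql" in db_url:
        # MSSQL uses TOP and not LIMIT
        return query.replace("SELECT", "SELECT TOP 1", 1)
    return query.rstrip().rstrip(";") + " LIMIT 1;"


print(limit_to_single_row("SELECT value FROM states;", "sqlite:///home-assistant_v2.db"))
# SELECT value FROM states LIMIT 1;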
|
||||
|
||||
async_add_entities(
|
||||
[
|
||||
SQLSensor(
|
||||
trigger_entity_config,
|
||||
sessmaker,
|
||||
mod_query_template,
|
||||
query_str,
|
||||
column_name,
|
||||
value_template,
|
||||
yaml,
|
||||
@@ -212,7 +200,7 @@ class SQLSensor(ManualTriggerSensorEntity):
|
||||
self,
|
||||
trigger_entity_config: ConfigType,
|
||||
sessmaker: scoped_session,
|
||||
query: ValueTemplate,
|
||||
query: str,
|
||||
column: str,
|
||||
value_template: ValueTemplate | None,
|
||||
yaml: bool,
|
||||
@@ -226,6 +214,7 @@ class SQLSensor(ManualTriggerSensorEntity):
|
||||
self.sessionmaker = sessmaker
|
||||
self._attr_extra_state_attributes = {}
|
||||
self._use_database_executor = use_database_executor
|
||||
self._lambda_stmt = generate_lambda_stmt(query)
|
||||
if not yaml and (unique_id := trigger_entity_config.get(CONF_UNIQUE_ID)):
|
||||
self._attr_name = None
|
||||
self._attr_has_entity_name = True
|
||||
@@ -266,22 +255,11 @@ class SQLSensor(ManualTriggerSensorEntity):
|
||||
self._attr_extra_state_attributes = {}
|
||||
sess: scoped_session = self.sessionmaker()
|
||||
try:
|
||||
rendered_query = check_and_render_sql_query(self.hass, self._query)
|
||||
_lambda_stmt = generate_lambda_stmt(rendered_query)
|
||||
result: Result = sess.execute(_lambda_stmt)
|
||||
except (TemplateError, InvalidSqlQuery) as err:
|
||||
_LOGGER.error(
|
||||
"Error rendering query %s: %s",
|
||||
redact_credentials(self._query.template),
|
||||
redact_credentials(str(err)),
|
||||
)
|
||||
sess.rollback()
|
||||
sess.close()
|
||||
return
|
||||
result: Result = sess.execute(self._lambda_stmt)
|
||||
except SQLAlchemyError as err:
|
||||
_LOGGER.error(
|
||||
"Error executing query %s: %s",
|
||||
rendered_query,
|
||||
self._query,
|
||||
redact_credentials(str(err)),
|
||||
)
|
||||
sess.rollback()
|
||||
@@ -289,7 +267,7 @@ class SQLSensor(ManualTriggerSensorEntity):
|
||||
return
|
||||
|
||||
for res in result.mappings():
|
||||
_LOGGER.debug("Query %s result in %s", rendered_query, res.items())
|
||||
_LOGGER.debug("Query %s result in %s", self._query, res.items())
|
||||
data = res[self._column_name]
|
||||
for key, value in res.items():
|
||||
self._attr_extra_state_attributes[key] = convert_value(value)
|
||||
@@ -309,6 +287,6 @@ class SQLSensor(ManualTriggerSensorEntity):
|
||||
self._attr_native_value = data
|
||||
|
||||
if data is None:
|
||||
_LOGGER.warning("%s returned no results", rendered_query)
|
||||
_LOGGER.warning("%s returned no results", self._query)
|
||||
|
||||
sess.close()
|
||||
|
||||
@@ -19,13 +19,11 @@ from homeassistant.core import (
|
||||
)
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.trigger_template_entity import ValueTemplate
|
||||
from homeassistant.util.json import JsonValueType
|
||||
|
||||
from .const import CONF_QUERY, DOMAIN
|
||||
from .util import (
|
||||
async_create_sessionmaker,
|
||||
check_and_render_sql_query,
|
||||
convert_value,
|
||||
generate_lambda_stmt,
|
||||
redact_credentials,
|
||||
@@ -39,9 +37,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
SERVICE_QUERY = "query"
|
||||
SERVICE_QUERY_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_QUERY): vol.All(
|
||||
cv.template, ValueTemplate.from_template, validate_sql_select
|
||||
),
|
||||
vol.Required(CONF_QUERY): vol.All(cv.string, validate_sql_select),
|
||||
vol.Optional(CONF_DB_URL): cv.string,
|
||||
}
|
||||
)
|
||||
@@ -76,9 +72,8 @@ async def _async_query_service(
|
||||
def _execute_and_convert_query() -> list[JsonValueType]:
|
||||
"""Execute the query and return the results with converted types."""
|
||||
sess: Session = sessmaker()
|
||||
rendered_query = check_and_render_sql_query(call.hass, query_str)
|
||||
try:
|
||||
result: Result = sess.execute(generate_lambda_stmt(rendered_query))
|
||||
result: Result = sess.execute(generate_lambda_stmt(query_str))
|
||||
except SQLAlchemyError as err:
|
||||
_LOGGER.debug(
|
||||
"Error executing query %s: %s",
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"db_url_invalid": "Database URL invalid",
|
||||
"multiple_queries": "Multiple SQL queries are not supported",
|
||||
"query_invalid": "SQL query invalid",
|
||||
"query_no_read_only": "SQL query is not a read-only SELECT query or it's of an unknown type"
|
||||
"query_no_read_only": "SQL query must be read-only"
|
||||
},
|
||||
"step": {
|
||||
"options": {
|
||||
|
||||
@@ -19,9 +19,7 @@ import voluptuous as vol
|
||||
from homeassistant.components.recorder import SupportedDialect, get_instance
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, TemplateError
|
||||
from homeassistant.helpers import issue_registry as ir
|
||||
from homeassistant.helpers.template import Template
|
||||
|
||||
from .const import DB_URL_RE, DOMAIN
|
||||
from .models import SQLData
|
||||
@@ -46,14 +44,16 @@ def resolve_db_url(hass: HomeAssistant, db_url: str | None) -> str:
|
||||
return get_instance(hass).db_url
|
||||
|
||||
|
||||
def validate_sql_select(value: Template) -> Template:
|
||||
def validate_sql_select(value: str) -> str:
|
||||
"""Validate that value is a SQL SELECT query."""
|
||||
try:
|
||||
assert value.hass
|
||||
check_and_render_sql_query(value.hass, value)
|
||||
except (TemplateError, InvalidSqlQuery) as err:
|
||||
raise vol.Invalid(str(err)) from err
|
||||
return value
|
||||
if len(query := sqlparse.parse(value.lstrip().lstrip(";"))) > 1:
|
||||
raise vol.Invalid("Multiple SQL queries are not supported")
|
||||
if len(query) == 0 or (query_type := query[0].get_type()) == "UNKNOWN":
|
||||
raise vol.Invalid("Invalid SQL query")
|
||||
if query_type != "SELECT":
|
||||
_LOGGER.debug("The SQL query %s is of type %s", query, query_type)
|
||||
raise vol.Invalid("Only SELECT queries allowed")
|
||||
return str(query[0])
|
||||
|
||||
|
||||
async def async_create_sessionmaker(
|
||||
@@ -113,7 +113,7 @@ async def async_create_sessionmaker(
|
||||
|
||||
def validate_query(
|
||||
hass: HomeAssistant,
|
||||
query_template: str | Template,
|
||||
query_str: str,
|
||||
uses_recorder_db: bool,
|
||||
unique_id: str | None = None,
|
||||
) -> None:
|
||||
@@ -121,7 +121,7 @@ def validate_query(
|
||||
|
||||
Args:
|
||||
hass: The Home Assistant instance.
|
||||
query_template: The SQL query string to be validated.
|
||||
query_str: The SQL query string to be validated.
|
||||
uses_recorder_db: A boolean indicating if the query is against the recorder database.
|
||||
unique_id: The unique ID of the entity, used for creating issue registry keys.
|
||||
|
||||
@@ -131,10 +131,6 @@ def validate_query(
|
||||
"""
|
||||
if not uses_recorder_db:
|
||||
return
|
||||
if isinstance(query_template, Template):
|
||||
query_str = query_template.async_render()
|
||||
else:
|
||||
query_str = Template(query_template, hass).async_render()
|
||||
redacted_query = redact_credentials(query_str)
|
||||
|
||||
issue_key = unique_id if unique_id else redacted_query
|
||||
@@ -243,49 +239,3 @@ def convert_value(value: Any) -> Any:
|
||||
return f"0x{value.hex()}"
|
||||
case _:
|
||||
return value


def check_and_render_sql_query(hass: HomeAssistant, query: Template | str) -> str:
    """Check and render SQL query."""
    if isinstance(query, str):
        query = query.strip()
        if not query:
            raise EmptyQueryError("Query cannot be empty")
        query = Template(query, hass=hass)

    # Raises TemplateError if template is invalid
    query.ensure_valid()
    rendered_query: str = query.async_render()

    if len(rendered_queries := sqlparse.parse(rendered_query.lstrip().lstrip(";"))) > 1:
        raise MultipleQueryError("Multiple SQL statements are not allowed")
    if (
        len(rendered_queries) == 0
        or (query_type := rendered_queries[0].get_type()) == "UNKNOWN"
    ):
        raise UnknownQueryTypeError("SQL query is empty or unknown type")
    if query_type != "SELECT":
        _LOGGER.debug("The SQL query %s is of type %s", rendered_query, query_type)
        raise NotSelectQueryError("SQL query must be of type SELECT")

    return str(rendered_queries[0])


class InvalidSqlQuery(HomeAssistantError):
    """SQL query is invalid error."""


class EmptyQueryError(InvalidSqlQuery):
    """SQL query is empty error."""


class MultipleQueryError(InvalidSqlQuery):
    """SQL query is multiple error."""


class UnknownQueryTypeError(InvalidSqlQuery):
    """SQL query is of unknown type error."""


class NotSelectQueryError(InvalidSqlQuery):
    """SQL query is not a SELECT statement error."""
|
||||
|
||||
@@ -17,7 +17,7 @@ from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN, LOGGER
|
||||
from .coordinator import StookwijzerConfigEntry, StookwijzerCoordinator
|
||||
from .services import async_setup_services
|
||||
from .services import setup_services
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
@@ -26,7 +26,7 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Stookwijzer component."""
|
||||
async_setup_services(hass)
|
||||
setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -11,7 +11,6 @@ from homeassistant.core import (
|
||||
ServiceCall,
|
||||
ServiceResponse,
|
||||
SupportsResponse,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
|
||||
@@ -52,8 +51,7 @@ def async_get_entry(
    return cast(StookwijzerConfigEntry, entry)


@callback
def async_setup_services(hass: HomeAssistant) -> None:
def setup_services(hass: HomeAssistant) -> None:
    """Set up the services for the Stookwijzer integration."""

    async def async_get_forecast(call: ServiceCall) -> ServiceResponse | None:
|
||||
|
||||
@@ -43,5 +43,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_push",
  "loggers": ["tuya_sharing"],
  "requirements": ["tuya-device-sharing-sdk==0.2.5"]
  "requirements": ["tuya-device-sharing-sdk==0.2.4"]
}
|
||||
|
||||
@@ -6,7 +6,7 @@ import base64
|
||||
from dataclasses import dataclass
|
||||
import json
|
||||
import struct
|
||||
from typing import Any, Literal, Self, overload
|
||||
from typing import Literal, Self, overload
|
||||
|
||||
from tuya_sharing import CustomerDevice
|
||||
|
||||
@@ -14,76 +14,6 @@ from .const import DPCode, DPType
|
||||
from .util import remap_value
|
||||
|
||||
|
||||
@dataclass
|
||||
class DPCodeWrapper:
|
||||
"""Base DPCode wrapper.
|
||||
|
||||
Used as a common interface for referring to a DPCode, and
|
||||
access read conversion routines.
|
||||
"""
|
||||
|
||||
dpcode: str
|
||||
|
||||
def _read_device_status_raw(self, device: CustomerDevice) -> Any | None:
|
||||
"""Read the raw device status for the DPCode.
|
||||
|
||||
Private helper method for `read_device_status`.
|
||||
"""
|
||||
return device.status.get(self.dpcode)
|
||||
|
||||
def read_device_status(self, device: CustomerDevice) -> Any | None:
|
||||
"""Read the device value for the dpcode."""
|
||||
raise NotImplementedError("read_device_value must be implemented")
|
||||
|
||||
|
||||
@dataclass
class DPCodeBooleanWrapper(DPCodeWrapper):
    """Simple wrapper for boolean values.

    Supports True/False only.
    """

    def read_device_status(self, device: CustomerDevice) -> bool | None:
        """Read the device value for the dpcode."""
        if (raw_value := self._read_device_status_raw(device)) in (True, False):
            return raw_value
        return None


@dataclass(kw_only=True)
class DPCodeEnumWrapper(DPCodeWrapper):
    """Simple wrapper for EnumTypeData values."""

    enum_type_information: EnumTypeData

    def read_device_status(self, device: CustomerDevice) -> str | None:
        """Read the device value for the dpcode.

        Values outside of the list defined by the Enum type information will
        return None.
        """
        if (
            raw_value := self._read_device_status_raw(device)
        ) in self.enum_type_information.range:
            return raw_value
        return None

    @classmethod
    def find_dpcode(
        cls,
        device: CustomerDevice,
        dpcodes: str | DPCode | tuple[DPCode, ...],
        *,
        prefer_function: bool = False,
    ) -> Self | None:
        """Find and return a DPCodeEnumWrapper for the given DP codes."""
        if enum_type := find_dpcode(
            device, dpcodes, dptype=DPType.ENUM, prefer_function=prefer_function
        ):
            return cls(dpcode=enum_type.dpcode, enum_type_information=enum_type)
        return None
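# Editor's note: an illustrative, self-contained sketch of the wrapper pattern above. The
# FakeDevice stand-in only mimics the `status` mapping of tuya_sharing's CustomerDevice;
# names here are examples, not the integration's API.
from dataclasses import dataclass
from typing import Any


@dataclass
class FakeDevice:
    status: dict[str, Any]


@dataclass
class BooleanWrapper:
    dpcode: str

    def read_device_status(self, device: FakeDevice) -> bool | None:
        # Only strict True/False pass through; anything else reads as unknown.
        raw_value = device.status.get(self.dpcode)
        return raw_value if raw_value in (True, False) else None


wrapper = BooleanWrapper(dpcode="switch_1")
print(wrapper.read_device_status(FakeDevice(status={"switch_1": True})))  # True
print(wrapper.read_device_status(FakeDevice(status={"switch_1": "on"})))  # None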
|
||||
|
||||
|
||||
@overload
|
||||
def find_dpcode(
|
||||
device: CustomerDevice,
|
||||
@@ -123,31 +53,33 @@ def find_dpcode(
|
||||
elif not isinstance(dpcodes, tuple):
|
||||
dpcodes = (dpcodes,)
|
||||
|
||||
lookup_tuple = (
|
||||
(device.function, device.status_range)
|
||||
if prefer_function
|
||||
else (device.status_range, device.function)
|
||||
)
|
||||
order = ["status_range", "function"]
|
||||
if prefer_function:
|
||||
order = ["function", "status_range"]
|
||||
|
||||
for dpcode in dpcodes:
|
||||
for device_specs in lookup_tuple:
|
||||
if not (
|
||||
(current_definition := device_specs.get(dpcode))
|
||||
and current_definition.type == dptype
|
||||
):
|
||||
for key in order:
|
||||
if dpcode not in getattr(device, key):
|
||||
continue
|
||||
if dptype is DPType.ENUM:
|
||||
if (
|
||||
dptype == DPType.ENUM
|
||||
and getattr(device, key)[dpcode].type == DPType.ENUM
|
||||
):
|
||||
if not (
|
||||
enum_type := EnumTypeData.from_json(
|
||||
dpcode, current_definition.values
|
||||
dpcode, getattr(device, key)[dpcode].values
|
||||
)
|
||||
):
|
||||
continue
|
||||
return enum_type
|
||||
if dptype is DPType.INTEGER:
|
||||
|
||||
if (
|
||||
dptype == DPType.INTEGER
|
||||
and getattr(device, key)[dpcode].type == DPType.INTEGER
|
||||
):
|
||||
if not (
|
||||
integer_type := IntegerTypeData.from_json(
|
||||
dpcode, current_definition.values
|
||||
dpcode, getattr(device, key)[dpcode].values
|
||||
)
|
||||
):
|
||||
continue
|
||||
|
||||
@@ -11,9 +11,9 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import TuyaConfigEntry
|
||||
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode
|
||||
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode, DPType
|
||||
from .entity import TuyaEntity
|
||||
from .models import DPCodeEnumWrapper
|
||||
from .models import find_dpcode
|
||||
|
||||
# All descriptions can be found here. Mostly the Enum data types in the
|
||||
# default instructions set of each category end up being a select.
|
||||
@@ -360,15 +360,9 @@ async def async_setup_entry(
|
||||
device = manager.device_map[device_id]
|
||||
if descriptions := SELECTS.get(device.category):
|
||||
entities.extend(
|
||||
TuyaSelectEntity(
|
||||
device, manager, description, dpcode_wrapper=dpcode_wrapper
|
||||
)
|
||||
TuyaSelectEntity(device, manager, description)
|
||||
for description in descriptions
|
||||
if (
|
||||
dpcode_wrapper := DPCodeEnumWrapper.find_dpcode(
|
||||
device, description.key, prefer_function=True
|
||||
)
|
||||
)
|
||||
if description.key in device.status
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
@@ -388,20 +382,35 @@ class TuyaSelectEntity(TuyaEntity, SelectEntity):
|
||||
device: CustomerDevice,
|
||||
device_manager: Manager,
|
||||
description: SelectEntityDescription,
|
||||
dpcode_wrapper: DPCodeEnumWrapper,
|
||||
) -> None:
|
||||
"""Init Tuya sensor."""
|
||||
super().__init__(device, device_manager)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{super().unique_id}{description.key}"
|
||||
self._dpcode_wrapper = dpcode_wrapper
|
||||
self._attr_options = dpcode_wrapper.enum_type_information.range
|
||||
|
||||
self._attr_options: list[str] = []
|
||||
if enum_type := find_dpcode(
|
||||
self.device, description.key, dptype=DPType.ENUM, prefer_function=True
|
||||
):
|
||||
self._attr_options = enum_type.range
|
||||
|
||||
@property
|
||||
def current_option(self) -> str | None:
|
||||
"""Return the selected entity option to represent the entity state."""
|
||||
return self._dpcode_wrapper.read_device_status(self.device)
|
||||
# Raw value
|
||||
value = self.device.status.get(self.entity_description.key)
|
||||
if value is None or value not in self._attr_options:
|
||||
return None
|
||||
|
||||
return value
|
||||
|
||||
def select_option(self, option: str) -> None:
|
||||
"""Change the selected option."""
|
||||
self._send_command([{"code": self._dpcode_wrapper.dpcode, "value": option}])
|
||||
self._send_command(
|
||||
[
|
||||
{
|
||||
"code": self.entity_description.key,
|
||||
"value": option,
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
@@ -27,7 +27,6 @@ from homeassistant.helpers.issue_registry import (
|
||||
from . import TuyaConfigEntry
|
||||
from .const import DOMAIN, TUYA_DISCOVERY_NEW, DeviceCategory, DPCode
|
||||
from .entity import TuyaEntity
|
||||
from .models import DPCodeBooleanWrapper
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
@@ -939,12 +938,7 @@ async def async_setup_entry(
|
||||
device = manager.device_map[device_id]
|
||||
if descriptions := SWITCHES.get(device.category):
|
||||
entities.extend(
|
||||
TuyaSwitchEntity(
|
||||
device,
|
||||
manager,
|
||||
description,
|
||||
DPCodeBooleanWrapper(description.key),
|
||||
)
|
||||
TuyaSwitchEntity(device, manager, description)
|
||||
for description in descriptions
|
||||
if description.key in device.status
|
||||
and _check_deprecation(
|
||||
@@ -1021,23 +1015,21 @@ class TuyaSwitchEntity(TuyaEntity, SwitchEntity):
|
||||
device: CustomerDevice,
|
||||
device_manager: Manager,
|
||||
description: SwitchEntityDescription,
|
||||
dpcode_wrapper: DPCodeBooleanWrapper,
|
||||
) -> None:
|
||||
"""Init TuyaHaSwitch."""
|
||||
super().__init__(device, device_manager)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{super().unique_id}{description.key}"
|
||||
self._dpcode_wrapper = dpcode_wrapper
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if switch is on."""
|
||||
return self._dpcode_wrapper.read_device_status(self.device)
|
||||
return self.device.status.get(self.entity_description.key, False)
|
||||
|
||||
def turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch on."""
|
||||
self._send_command([{"code": self._dpcode_wrapper.dpcode, "value": True}])
|
||||
self._send_command([{"code": self.entity_description.key, "value": True}])
|
||||
|
||||
def turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch off."""
|
||||
self._send_command([{"code": self._dpcode_wrapper.dpcode, "value": False}])
|
||||
self._send_command([{"code": self.entity_description.key, "value": False}])
|
||||
|
||||
@@ -17,7 +17,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from . import TuyaConfigEntry
|
||||
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode
|
||||
from .entity import TuyaEntity
|
||||
from .models import DPCodeBooleanWrapper
|
||||
|
||||
VALVES: dict[DeviceCategory, tuple[ValveEntityDescription, ...]] = {
|
||||
DeviceCategory.SFKZQ: (
|
||||
@@ -94,12 +93,7 @@ async def async_setup_entry(
|
||||
device = manager.device_map[device_id]
|
||||
if descriptions := VALVES.get(device.category):
|
||||
entities.extend(
|
||||
TuyaValveEntity(
|
||||
device,
|
||||
manager,
|
||||
description,
|
||||
DPCodeBooleanWrapper(description.key),
|
||||
)
|
||||
TuyaValveEntity(device, manager, description)
|
||||
for description in descriptions
|
||||
if description.key in device.status
|
||||
)
|
||||
@@ -123,29 +117,25 @@ class TuyaValveEntity(TuyaEntity, ValveEntity):
|
||||
device: CustomerDevice,
|
||||
device_manager: Manager,
|
||||
description: ValveEntityDescription,
|
||||
dpcode_wrapper: DPCodeBooleanWrapper,
|
||||
) -> None:
|
||||
"""Init TuyaValveEntity."""
|
||||
super().__init__(device, device_manager)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{super().unique_id}{description.key}"
|
||||
self._dpcode_wrapper = dpcode_wrapper
|
||||
|
||||
@property
|
||||
def is_closed(self) -> bool | None:
|
||||
def is_closed(self) -> bool:
|
||||
"""Return if the valve is closed."""
|
||||
if (is_open := self._dpcode_wrapper.read_device_status(self.device)) is None:
|
||||
return None
|
||||
return not is_open
|
||||
return not self.device.status.get(self.entity_description.key, False)
|
||||
|
||||
async def async_open_valve(self) -> None:
|
||||
"""Open the valve."""
|
||||
await self.hass.async_add_executor_job(
|
||||
self._send_command, [{"code": self._dpcode_wrapper.dpcode, "value": True}]
|
||||
self._send_command, [{"code": self.entity_description.key, "value": True}]
|
||||
)
|
||||
|
||||
async def async_close_valve(self) -> None:
|
||||
"""Close the valve."""
|
||||
await self.hass.async_add_executor_job(
|
||||
self._send_command, [{"code": self._dpcode_wrapper.dpcode, "value": False}]
|
||||
self._send_command, [{"code": self.entity_description.key, "value": False}]
|
||||
)
|
||||
|
||||
@@ -41,11 +41,8 @@ from homeassistant.helpers import (
|
||||
template,
|
||||
)
|
||||
from homeassistant.helpers.condition import (
|
||||
async_from_config as async_condition_from_config,
|
||||
async_get_all_descriptions as async_get_all_condition_descriptions,
|
||||
async_subscribe_platform_events as async_subscribe_condition_platform_events,
|
||||
async_validate_condition_config,
|
||||
async_validate_conditions_config,
|
||||
)
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entityfilter import (
|
||||
@@ -69,9 +66,7 @@ from homeassistant.helpers.service import (
|
||||
)
|
||||
from homeassistant.helpers.trigger import (
|
||||
async_get_all_descriptions as async_get_all_trigger_descriptions,
|
||||
async_initialize_triggers,
|
||||
async_subscribe_platform_events as async_subscribe_trigger_platform_events,
|
||||
async_validate_trigger_config,
|
||||
)
|
||||
from homeassistant.loader import (
|
||||
IntegrationNotFound,
|
||||
@@ -890,7 +885,10 @@ async def handle_subscribe_trigger(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle subscribe trigger command."""
|
||||
trigger_config = await async_validate_trigger_config(hass, msg["trigger"])
|
||||
# Circular dep
|
||||
from homeassistant.helpers import trigger # noqa: PLC0415
|
||||
|
||||
trigger_config = await trigger.async_validate_trigger_config(hass, msg["trigger"])
|
||||
|
||||
@callback
|
||||
def forward_triggers(
|
||||
@@ -907,7 +905,7 @@ async def handle_subscribe_trigger(
|
||||
)
|
||||
|
||||
connection.subscriptions[msg["id"]] = (
|
||||
await async_initialize_triggers(
|
||||
await trigger.async_initialize_triggers(
|
||||
hass,
|
||||
trigger_config,
|
||||
forward_triggers,
|
||||
@@ -937,10 +935,13 @@ async def handle_test_condition(
    hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
    """Handle test condition command."""
    # Circular dep
    from homeassistant.helpers import condition  # noqa: PLC0415

    # Do static + dynamic validation of the condition
    config = await async_validate_condition_config(hass, msg["condition"])
    config = await condition.async_validate_condition_config(hass, msg["condition"])
    # Test the condition
    check_condition = await async_condition_from_config(hass, config)
    check_condition = await condition.async_from_config(hass, config)
    connection.send_result(
        msg["id"], {"result": check_condition(hass, msg.get("variables"))}
    )
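# Editor's note: a small, generic sketch of the deferred-import technique used above to break
# an import cycle; the json module stands in for the late-imported helper, and the names are
# placeholders rather than Home Assistant modules.
def handle_request(payload: dict) -> dict:
    """Import the helper lazily so module import time stays cycle-free."""
    # Importing inside the function defers resolution until the first call,
    # after both modules have finished importing.
    import json  # stand-in for the late-imported helper module

    return {"echo": json.dumps(payload)}


print(handle_request({"condition": "state"}))  # {'echo': '{"condition": "state"}'}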
|
||||
@@ -1027,16 +1028,16 @@ async def handle_validate_config(
|
||||
) -> None:
|
||||
"""Handle validate config command."""
|
||||
# Circular dep
|
||||
from homeassistant.helpers import script # noqa: PLC0415
|
||||
from homeassistant.helpers import condition, script, trigger # noqa: PLC0415
|
||||
|
||||
result = {}
|
||||
|
||||
for key, schema, validator in (
|
||||
("triggers", cv.TRIGGER_SCHEMA, async_validate_trigger_config),
|
||||
("triggers", cv.TRIGGER_SCHEMA, trigger.async_validate_trigger_config),
|
||||
(
|
||||
"conditions",
|
||||
cv.CONDITIONS_SCHEMA,
|
||||
async_validate_conditions_config,
|
||||
condition.async_validate_conditions_config,
|
||||
),
|
||||
("actions", cv.SCRIPT_SCHEMA, script.async_validate_actions_config),
|
||||
):
|
||||
|
||||
@@ -8,7 +8,6 @@ import collections
|
||||
from contextlib import suppress
|
||||
from enum import StrEnum
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
@@ -40,7 +39,7 @@ from homeassistant.config_entries import (
|
||||
)
|
||||
from homeassistant.const import CONF_NAME
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.data_entry_flow import AbortFlow
|
||||
from homeassistant.data_entry_flow import AbortFlow, progress_step
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
from homeassistant.helpers.selector import FileSelector, FileSelectorConfig
|
||||
@@ -58,8 +57,6 @@ from .radio_manager import (
|
||||
ZhaRadioManager,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_MANUAL_PATH = "Enter Manually"
|
||||
DECONZ_DOMAIN = "deconz"
|
||||
|
||||
@@ -194,14 +191,8 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
|
||||
self._hass = None # type: ignore[assignment]
|
||||
self._radio_mgr = ZhaRadioManager()
|
||||
self._restore_backup_task: asyncio.Task[None] | None = None
|
||||
self._reset_old_radio_task: asyncio.Task[None] | None = None
|
||||
self._form_network_task: asyncio.Task[None] | None = None
|
||||
self._extra_network_config: dict[str, Any] = {}
|
||||
|
||||
# Progress flow steps cannot abort so we need to store the abort reason and then
|
||||
# re-raise it in a dedicated step
|
||||
self._progress_error: AbortFlow | None = None
|
||||
|
||||
@property
|
||||
def hass(self) -> HomeAssistant:
|
||||
"""Return hass."""
|
||||
@@ -233,13 +224,6 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
|
||||
async def _async_create_radio_entry(self) -> ConfigFlowResult:
|
||||
"""Create a config entry with the current flow state."""
|
||||
|
||||
async def async_step_progress_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Abort when progress step failed."""
|
||||
assert self._progress_error is not None
|
||||
raise self._progress_error
|
||||
|
||||
async def async_step_choose_serial_port(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -480,22 +464,7 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
|
||||
self._radio_mgr.chosen_backup = self._radio_mgr.backups[0]
|
||||
return await self.async_step_maybe_reset_old_radio()
|
||||
|
||||
async def _async_reset_old_radio(self, config_entry: ConfigEntry) -> None:
|
||||
"""Do the work of resetting the old radio."""
|
||||
|
||||
# Unload ZHA before connecting to the old adapter
|
||||
with suppress(OperationNotAllowed):
|
||||
await self.hass.config_entries.async_unload(config_entry.entry_id)
|
||||
|
||||
# Create a radio manager to connect to the old stick to reset it
|
||||
temp_radio_mgr = ZhaRadioManager()
|
||||
temp_radio_mgr.hass = self.hass
|
||||
temp_radio_mgr.device_path = config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH]
|
||||
temp_radio_mgr.device_settings = config_entry.data[CONF_DEVICE]
|
||||
temp_radio_mgr.radio_type = RadioType[config_entry.data[CONF_RADIO_TYPE]]
|
||||
|
||||
await temp_radio_mgr.async_reset_adapter()
|
||||
|
||||
@progress_step()
|
||||
async def async_step_maybe_reset_old_radio(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -506,37 +475,30 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
|
||||
DOMAIN, include_ignore=False
|
||||
)
|
||||
|
||||
if not config_entries:
|
||||
return await self.async_step_restore_backup()
|
||||
|
||||
if self._reset_old_radio_task is None:
|
||||
# This will only ever be called during migration, so there must be an
|
||||
# existing config entry
|
||||
if config_entries:
|
||||
assert len(config_entries) == 1
|
||||
config_entry = config_entries[0]
|
||||
|
||||
self._reset_old_radio_task = self.hass.async_create_task(
|
||||
self._async_reset_old_radio(config_entry),
|
||||
"Reset old radio",
|
||||
)
|
||||
# Unload ZHA before connecting to the old adapter
|
||||
with suppress(OperationNotAllowed):
|
||||
await self.hass.config_entries.async_unload(config_entry.entry_id)
|
||||
|
||||
if not self._reset_old_radio_task.done():
|
||||
return self.async_show_progress(
|
||||
step_id="maybe_reset_old_radio",
|
||||
progress_action="maybe_reset_old_radio",
|
||||
progress_task=self._reset_old_radio_task,
|
||||
)
|
||||
# Create a radio manager to connect to the old stick to reset it
|
||||
temp_radio_mgr = ZhaRadioManager()
|
||||
temp_radio_mgr.hass = self.hass
|
||||
temp_radio_mgr.device_path = config_entry.data[CONF_DEVICE][
|
||||
CONF_DEVICE_PATH
|
||||
]
|
||||
temp_radio_mgr.device_settings = config_entry.data[CONF_DEVICE]
|
||||
temp_radio_mgr.radio_type = RadioType[config_entry.data[CONF_RADIO_TYPE]]
|
||||
|
||||
try:
|
||||
await self._reset_old_radio_task
|
||||
except HomeAssistantError:
|
||||
_LOGGER.exception("Failed to reset old radio during migration")
|
||||
# Old adapter not found or cannot connect, show prompt to plug back in
|
||||
return self.async_show_progress_done(next_step_id="plug_in_old_radio")
|
||||
finally:
|
||||
self._reset_old_radio_task = None
|
||||
try:
|
||||
await temp_radio_mgr.async_reset_adapter()
|
||||
except HomeAssistantError:
|
||||
# Old adapter not found or cannot connect, show prompt to plug back in
|
||||
return await self.async_step_plug_in_old_radio()
|
||||
|
||||
return self.async_show_progress_done(next_step_id="restore_backup")
|
||||
return await self.async_step_restore_backup()
|
||||
|
||||
async def async_step_plug_in_old_radio(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -656,35 +618,16 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
        # This step exists only for translations, it does nothing new
        return await self.async_step_form_new_network(user_input)

    async def _async_form_new_network(self) -> None:
        """Do the work of forming a new network."""
        await self._radio_mgr.async_form_network(config=self._extra_network_config)
        # Load the newly formed network settings to get the network info
        await self._radio_mgr.async_load_network_settings()

    @progress_step()
    async def async_step_form_new_network(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Form a brand-new network."""
        if self._form_network_task is None:
            self._form_network_task = self.hass.async_create_task(
                self._async_form_new_network(),
                "Form new network",
            )
        await self._radio_mgr.async_form_network(config=self._extra_network_config)

        if not self._form_network_task.done():
            return self.async_show_progress(
                step_id="form_new_network",
                progress_action="form_new_network",
                progress_task=self._form_network_task,
            )

        try:
            await self._form_network_task
        finally:
            self._form_network_task = None

        return self.async_show_progress_done(next_step_id="create_entry")
        # Load the newly formed network settings to get the network info
        await self._radio_mgr.async_load_network_settings()
        return await self._async_create_radio_entry()
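# Editor's note: the show-progress pattern above, reduced to a runnable asyncio sketch: keep
# one background task, re-enter the step until it finishes, then advance. Names are
# illustrative, not Home Assistant APIs.
from __future__ import annotations

import asyncio


class FakeFlow:
    def __init__(self) -> None:
        self._task: asyncio.Task[None] | None = None

    async def _do_work(self) -> None:
        await asyncio.sleep(0.1)  # stands in for forming the network

    async def step(self) -> str:
        if self._task is None:
            self._task = asyncio.get_running_loop().create_task(self._do_work())
        if not self._task.done():
            return "show_progress"  # the UI re-invokes this step once the task completes
        try:
            await self._task
        finally:
            self._task = None
        return "create_entry"


async def main() -> None:
    flow = FakeFlow()
    while (result := await flow.step()) == "show_progress":
        await asyncio.sleep(0.05)
    print(result)  # create_entry


asyncio.run(main())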
|
||||
|
||||
def _parse_uploaded_backup(
|
||||
self, uploaded_file_id: str
|
||||
@@ -789,15 +732,13 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
|
||||
next_step_id="pre_confirm_ezsp_ieee_overwrite"
|
||||
)
|
||||
except HomeAssistantError:
|
||||
_LOGGER.exception("Failed to restore network backup to new radio")
|
||||
# User unplugged the new adapter, allow retry
|
||||
return self.async_show_progress_done(next_step_id="pre_plug_in_new_radio")
|
||||
except CannotWriteNetworkSettings as exc:
|
||||
self._progress_error = AbortFlow(
|
||||
return self.async_abort(
|
||||
reason="cannot_restore_backup",
|
||||
description_placeholders={"error": str(exc)},
|
||||
)
|
||||
return self.async_show_progress_done(next_step_id="progress_failed")
|
||||
finally:
|
||||
self._restore_backup_task = None
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
"zha",
|
||||
"universal_silabs_flasher"
|
||||
],
|
||||
"requirements": ["zha==0.0.78"],
|
||||
"requirements": ["zha==0.0.77"],
|
||||
"usb": [
|
||||
{
|
||||
"description": "*2652*",
|
||||
|
||||
@@ -3736,6 +3736,9 @@ class OptionsFlow(ConfigEntryBaseFlow):
|
||||
|
||||
handler: str
|
||||
|
||||
_config_entry: ConfigEntry
|
||||
"""For compatibility only - to be removed in 2025.12"""
|
||||
|
||||
@callback
|
||||
def _async_abort_entries_match(
|
||||
self, match_dict: dict[str, Any] | None = None
|
||||
@@ -3776,10 +3779,26 @@ class OptionsFlow(ConfigEntryBaseFlow):
|
||||
Please note that this is not available inside `__init__` method, and
|
||||
can only be referenced after initialisation.
|
||||
"""
|
||||
# For compatibility only - to be removed in 2025.12
|
||||
if hasattr(self, "_config_entry"):
|
||||
return self._config_entry
|
||||
|
||||
if self.hass is None:
|
||||
raise ValueError("The config entry is not available during initialisation")
|
||||
return self.hass.config_entries.async_get_known_entry(self._config_entry_id)
|
||||
|
||||
@config_entry.setter
|
||||
def config_entry(self, value: ConfigEntry) -> None:
|
||||
"""Set the config entry value."""
|
||||
report_usage(
|
||||
"sets option flow config_entry explicitly, which is deprecated",
|
||||
core_behavior=ReportBehavior.ERROR,
|
||||
core_integration_behavior=ReportBehavior.ERROR,
|
||||
custom_integration_behavior=ReportBehavior.LOG,
|
||||
breaks_in_ha_version="2025.12",
|
||||
)
|
||||
self._config_entry = value
|
||||
|
||||
|
||||
class OptionsFlowWithConfigEntry(OptionsFlow):
|
||||
"""Base class for options flows with config entry and options.
|
||||
|
||||
@@ -3,9 +3,16 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import StrEnum
|
||||
from functools import partial
|
||||
from typing import TYPE_CHECKING, Final
|
||||
|
||||
from .generated.entity_platforms import EntityPlatforms
|
||||
from .helpers.deprecation import (
|
||||
DeprecatedConstantEnum,
|
||||
all_with_deprecated_constants,
|
||||
check_if_deprecated_constant,
|
||||
dir_with_deprecated_constants,
|
||||
)
|
||||
from .util.event_type import EventType
|
||||
from .util.hass_dict import HassKey
|
||||
from .util.signal_type import SignalType
|
||||
@@ -480,6 +487,13 @@ class UnitOfReactivePower(StrEnum):
|
||||
KILO_VOLT_AMPERE_REACTIVE = "kvar"
|
||||
|
||||
|
||||
_DEPRECATED_POWER_VOLT_AMPERE_REACTIVE: Final = DeprecatedConstantEnum(
|
||||
UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
|
||||
"2025.9",
|
||||
)
|
||||
"""Deprecated: please use UnitOfReactivePower.VOLT_AMPERE_REACTIVE."""
|
||||
|
||||
|
||||
# Energy units
|
||||
class UnitOfEnergy(StrEnum):
|
||||
"""Energy units."""
|
||||
@@ -671,6 +685,13 @@ class UnitOfArea(StrEnum):
|
||||
HECTARES = "ha"
|
||||
|
||||
|
||||
_DEPRECATED_AREA_SQUARE_METERS: Final = DeprecatedConstantEnum(
|
||||
UnitOfArea.SQUARE_METERS,
|
||||
"2025.12",
|
||||
)
|
||||
"""Deprecated: please use UnitOfArea.SQUARE_METERS"""
|
||||
|
||||
|
||||
# Mass units
|
||||
class UnitOfMass(StrEnum):
|
||||
"""Mass units."""
|
||||
@@ -993,3 +1014,10 @@ FORMAT_DATETIME: Final = f"{FORMAT_DATE} {FORMAT_TIME}"
|
||||
# This is not a hard limit, but caches and other
|
||||
# data structures will be pre-allocated to this size
|
||||
MAX_EXPECTED_ENTITY_IDS: Final = 16384
|
||||
|
||||
# These can be removed if no deprecated constant are in this module anymore
|
||||
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
|
||||
__dir__ = partial(
|
||||
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
|
||||
)
|
||||
__all__ = all_with_deprecated_constants(globals())
|
||||
|
||||
@@ -40,7 +40,7 @@ hass-nabucasa==1.5.1
|
||||
hassil==3.4.0
|
||||
home-assistant-bluetooth==1.13.1
|
||||
home-assistant-frontend==20251105.0
|
||||
home-assistant-intents==2025.11.7
|
||||
home-assistant-intents==2025.10.28
|
||||
httpx==0.28.1
|
||||
ifaddr==0.2.0
|
||||
Jinja2==3.1.6
|
||||
|
||||
@@ -90,9 +90,7 @@ def run(script_args: list) -> int:
|
||||
help="Exit non-zero if warnings are present",
|
||||
)
|
||||
|
||||
# Parse all args including --config & --script. Do not use script_args.
|
||||
# Example: python -m homeassistant --config "." --script check_config
|
||||
args, unknown = parser.parse_known_args()
|
||||
args, unknown = parser.parse_known_args(script_args)
|
||||
if unknown:
|
||||
print(color("red", "Unknown arguments:", ", ".join(unknown)))
|
||||
|
||||
|
||||
requirements_all.txt (generated)
@@ -190,7 +190,7 @@ aioairzone-cloud==0.7.2
aioairzone==1.0.2

# homeassistant.components.alexa_devices
aioamazondevices==8.0.1
aioamazondevices==6.5.6

# homeassistant.components.ambient_network
# homeassistant.components.ambient_station
@@ -650,7 +650,7 @@ blinkpy==0.24.1
blockchain==1.4.4

# homeassistant.components.blue_current
bluecurrent-api==1.3.2
bluecurrent-api==1.3.1

# homeassistant.components.bluemaestro
bluemaestro-ble==0.4.1
@@ -1187,7 +1187,7 @@ holidays==0.84
home-assistant-frontend==20251105.0

# homeassistant.components.conversation
home-assistant-intents==2025.11.7
home-assistant-intents==2025.10.28

# homeassistant.components.homematicip_cloud
homematicip==2.3.1
@@ -1486,7 +1486,7 @@ motionblindsble==0.1.3
motioneye-client==0.3.14

# homeassistant.components.bang_olufsen
mozart-api==5.1.0.247.1
mozart-api==4.1.1.116.4

# homeassistant.components.mullvad
mullvad-api==1.0.0
@@ -1612,7 +1612,7 @@ omnilogic==0.4.5
ondilo==0.5.0

# homeassistant.components.onedrive
onedrive-personal-sdk==0.0.16
onedrive-personal-sdk==0.0.15

# homeassistant.components.onvif
onvif-zeep-async==4.0.4
@@ -1719,7 +1719,7 @@ plexauth==0.0.6
plexwebsocket==0.0.14

# homeassistant.components.plugwise
plugwise==1.9.0
plugwise==1.8.3

# homeassistant.components.serial_pm
pmsensor==0.4
@@ -2191,7 +2191,7 @@ pymsteams==0.1.12
pymysensors==0.26.0

# homeassistant.components.iron_os
pynecil==4.2.1
pynecil==4.2.0

# homeassistant.components.netgear
pynetgear==0.10.10
@@ -2283,7 +2283,7 @@ pyplaato==0.0.19
pypoint==3.0.0

# homeassistant.components.portainer
pyportainer==1.0.14
pyportainer==1.0.13

# homeassistant.components.probe_plus
pyprobeplus==1.1.2
@@ -3014,7 +3014,7 @@ ttls==1.8.3
ttn_client==1.2.3

# homeassistant.components.tuya
tuya-device-sharing-sdk==0.2.5
tuya-device-sharing-sdk==0.2.4

# homeassistant.components.twentemilieu
twentemilieu==2.2.1
@@ -3222,7 +3222,7 @@ zeroconf==0.148.0
zeversolar==0.3.2

# homeassistant.components.zha
zha==0.0.78
zha==0.0.77

# homeassistant.components.zhong_hong
zhong-hong-hvac==1.0.13
requirements_test_all.txt (generated)
@@ -178,7 +178,7 @@ aioairzone-cloud==0.7.2
aioairzone==1.0.2

# homeassistant.components.alexa_devices
aioamazondevices==8.0.1
aioamazondevices==6.5.6

# homeassistant.components.ambient_network
# homeassistant.components.ambient_station
@@ -581,7 +581,7 @@ blebox-uniapi==2.5.0
blinkpy==0.24.1

# homeassistant.components.blue_current
bluecurrent-api==1.3.2
bluecurrent-api==1.3.1

# homeassistant.components.bluemaestro
bluemaestro-ble==0.4.1
@@ -1036,7 +1036,7 @@ holidays==0.84
home-assistant-frontend==20251105.0

# homeassistant.components.conversation
home-assistant-intents==2025.11.7
home-assistant-intents==2025.10.28

# homeassistant.components.homematicip_cloud
homematicip==2.3.1
@@ -1281,7 +1281,7 @@ motionblindsble==0.1.3
motioneye-client==0.3.14

# homeassistant.components.bang_olufsen
mozart-api==5.1.0.247.1
mozart-api==4.1.1.116.4

# homeassistant.components.mullvad
mullvad-api==1.0.0
@@ -1383,7 +1383,7 @@ omnilogic==0.4.5
ondilo==0.5.0

# homeassistant.components.onedrive
onedrive-personal-sdk==0.0.16
onedrive-personal-sdk==0.0.15

# homeassistant.components.onvif
onvif-zeep-async==4.0.4
@@ -1456,7 +1456,7 @@ plexauth==0.0.6
plexwebsocket==0.0.14

# homeassistant.components.plugwise
plugwise==1.9.0
plugwise==1.8.3

# homeassistant.components.poolsense
poolsense==0.0.8
@@ -1826,7 +1826,7 @@ pymonoprice==0.5
pymysensors==0.26.0

# homeassistant.components.iron_os
pynecil==4.2.1
pynecil==4.2.0

# homeassistant.components.netgear
pynetgear==0.10.10
@@ -1906,7 +1906,7 @@ pyplaato==0.0.19
pypoint==3.0.0

# homeassistant.components.portainer
pyportainer==1.0.14
pyportainer==1.0.13

# homeassistant.components.probe_plus
pyprobeplus==1.1.2
@@ -2487,7 +2487,7 @@ ttls==1.8.3
ttn_client==1.2.3

# homeassistant.components.tuya
tuya-device-sharing-sdk==0.2.5
tuya-device-sharing-sdk==0.2.4

# homeassistant.components.twentemilieu
twentemilieu==2.2.1
@@ -2665,7 +2665,7 @@ zeroconf==0.148.0
zeversolar==0.3.2

# homeassistant.components.zha
zha==0.0.78
zha==0.0.77

# homeassistant.components.zwave_js
zwave-js-server-python==0.67.1
script/hassfest/docker/Dockerfile (generated)
@@ -32,7 +32,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.9.6,source=/uv,target=/bin/uv \
go2rtc-client==0.2.1 \
ha-ffmpeg==3.2.2 \
hassil==3.4.0 \
home-assistant-intents==2025.11.7 \
home-assistant-intents==2025.10.28 \
mutagen==1.47.0 \
pymicro-vad==1.0.1 \
pyspeex-noise==1.0.2

@@ -2263,7 +2263,7 @@ def validate_iqs_file(config: Config, integration: Integration) -> None:
integration.add_error(
"quality_scale",
(
"New integrations marked as internal should be added to INTEGRATIONS_WITHOUT_SCALE in script/hassfest/quality_scale.py."
"New integrations marked as internal should be added to NO_QUALITY_SCALE in script/hassfest/quality_scale.py."
if integration.quality_scale == "internal"
else "Quality scale definition not found. New integrations are required to at least reach the Bronze tier."
),
@@ -133,24 +133,6 @@ async def test_changing_password(data: hass_auth.Data) -> None:
data.validate_login("test-UsEr", "new-pass")

async def test_password_truncated(data: hass_auth.Data) -> None:
"""Test long passwords are truncated before they are send to bcrypt for hashing.

With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
Previously the password was silently truncated.
https://github.com/pyca/bcrypt/pull/1000
"""
pwd_truncated = "hWwjDpFiYtDTaaMbXdjzeuKAPI3G4Di2mC92" * 4 # 72 chars
long_pwd = pwd_truncated * 2 # 144 chars
data.add_auth("test-user", long_pwd)
data.validate_login("test-user", long_pwd)

# As pwd are truncated, login will technically work with only the first 72 bytes.
data.validate_login("test-user", pwd_truncated)
with pytest.raises(hass_auth.InvalidAuth):
data.validate_login("test-user", pwd_truncated[:71])

async def test_login_flow_validates(data: hass_auth.Data, hass: HomeAssistant) -> None:
"""Test login flow."""
data.add_auth("test-user", "test-pass")
@@ -135,27 +135,6 @@ WAVE_ENHANCE_SERVICE_INFO = BluetoothServiceInfoBleak(
tx_power=0,
)

CORENTIUM_HOME_2_SERVICE_INFO = BluetoothServiceInfoBleak(
name="cc-cc-cc-cc-cc-cc",
address="cc:cc:cc:cc:cc:cc",
device=generate_ble_device(
address="cc:cc:cc:cc:cc:cc",
name="Airthings Corentium Home 2",
),
rssi=-61,
manufacturer_data={820: b"\xe4/\xa5\xae\t\x00"},
service_data={},
service_uuids=[],
source="local",
advertisement=generate_advertisement_data(
manufacturer_data={820: b"\xe4/\xa5\xae\t\x00"},
service_uuids=[],
),
connectable=True,
time=0,
tx_power=0,
)

VIEW_PLUS_SERVICE_INFO = BluetoothServiceInfoBleak(
name="cc-cc-cc-cc-cc-cc",
address="cc:cc:cc:cc:cc:cc",
@@ -286,24 +265,6 @@ WAVE_ENHANCE_DEVICE_INFO = AirthingsDevice(
address="cc:cc:cc:cc:cc:cc",
)

CORENTIUM_HOME_2_DEVICE_INFO = AirthingsDevice(
manufacturer="Airthings AS",
hw_version="REV X",
sw_version="R-SUB-1.3.4-master+0",
model=AirthingsDeviceType.CORENTIUM_HOME_2,
name="Airthings Corentium Home 2",
identifier="123456",
sensors={
"connectivity_mode": "Bluetooth",
"battery": 90,
"temperature": 20.0,
"humidity": 55.0,
"radon_1day_avg": 45,
"radon_1day_level": "low",
},
address="cc:cc:cc:cc:cc:cc",
)

TEMPERATURE_V1 = MockEntity(
unique_id="Airthings Wave Plus 123456_temperature",
name="Airthings Wave Plus 123456 Temperature",
@@ -7,7 +7,7 @@ from bleak import BleakError
from home_assistant_bluetooth import BluetoothServiceInfoBleak
import pytest

from homeassistant.components.airthings_ble.const import DEVICE_MODEL, DOMAIN
from homeassistant.components.airthings_ble.const import DOMAIN
from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_IGNORE, SOURCE_USER
from homeassistant.const import CONF_ADDRESS
from homeassistant.core import HomeAssistant
@@ -29,13 +29,12 @@ from tests.common import MockConfigEntry

async def test_bluetooth_discovery(hass: HomeAssistant) -> None:
"""Test discovery via bluetooth with a valid device."""
wave_plus_device = AirthingsDeviceType.WAVE_PLUS
with (
patch_async_ble_device_from_address(WAVE_SERVICE_INFO),
patch_airthings_ble(
AirthingsDevice(
manufacturer="Airthings AS",
model=wave_plus_device,
model=AirthingsDeviceType.WAVE_PLUS,
name="Airthings Wave Plus",
identifier="123456",
)
@@ -61,8 +60,6 @@ async def test_bluetooth_discovery(hass: HomeAssistant) -> None:
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "Airthings Wave Plus (2930123456)"
assert result["result"].unique_id == "cc:cc:cc:cc:cc:cc"
assert result["data"] == {DEVICE_MODEL: wave_plus_device.value}
assert result["result"].data == {DEVICE_MODEL: wave_plus_device.value}

async def test_bluetooth_discovery_no_BLEDevice(hass: HomeAssistant) -> None:
@@ -121,7 +118,6 @@ async def test_bluetooth_discovery_already_setup(hass: HomeAssistant) -> None:

async def test_user_setup(hass: HomeAssistant) -> None:
"""Test the user initiated form."""
wave_plus_device = AirthingsDeviceType.WAVE_PLUS
with (
patch(
"homeassistant.components.airthings_ble.config_flow.async_discovered_service_info",
@@ -131,7 +127,7 @@ async def test_user_setup(hass: HomeAssistant) -> None:
patch_airthings_ble(
AirthingsDevice(
manufacturer="Airthings AS",
model=wave_plus_device,
model=AirthingsDeviceType.WAVE_PLUS,
name="Airthings Wave Plus",
identifier="123456",
)
@@ -162,8 +158,6 @@ async def test_user_setup(hass: HomeAssistant) -> None:
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "Airthings Wave Plus (2930123456)"
assert result["result"].unique_id == "cc:cc:cc:cc:cc:cc"
assert result["data"] == {DEVICE_MODEL: wave_plus_device.value}
assert result["result"].data == {DEVICE_MODEL: wave_plus_device.value}

async def test_user_setup_replaces_ignored_device(hass: HomeAssistant) -> None:
@@ -174,7 +168,6 @@ async def test_user_setup_replaces_ignored_device(hass: HomeAssistant) -> None:
source=SOURCE_IGNORE,
)
entry.add_to_hass(hass)
wave_plus_device = AirthingsDeviceType.WAVE_PLUS
with (
patch(
"homeassistant.components.airthings_ble.config_flow.async_discovered_service_info",
@@ -184,7 +177,7 @@ async def test_user_setup_replaces_ignored_device(hass: HomeAssistant) -> None:
patch_airthings_ble(
AirthingsDevice(
manufacturer="Airthings AS",
model=wave_plus_device,
model=AirthingsDeviceType.WAVE_PLUS,
name="Airthings Wave Plus",
identifier="123456",
)
@@ -215,8 +208,6 @@ async def test_user_setup_replaces_ignored_device(hass: HomeAssistant) -> None:
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "Airthings Wave Plus (2930123456)"
assert result["result"].unique_id == "cc:cc:cc:cc:cc:cc"
assert result["data"] == {DEVICE_MODEL: wave_plus_device.value}
assert result["result"].data == {DEVICE_MODEL: wave_plus_device.value}

async def test_user_setup_no_device(hass: HomeAssistant) -> None:
@@ -1,192 +0,0 @@
"""Test the Airthings BLE integration init."""

from copy import deepcopy

from airthings_ble import AirthingsDeviceType
from freezegun.api import FrozenDateTimeFactory
import pytest

from homeassistant.components.airthings_ble.const import (
DEFAULT_SCAN_INTERVAL,
DEVICE_MODEL,
DEVICE_SPECIFIC_SCAN_INTERVAL,
DOMAIN,
)
from homeassistant.core import HomeAssistant

from . import (
CORENTIUM_HOME_2_DEVICE_INFO,
CORENTIUM_HOME_2_SERVICE_INFO,
WAVE_DEVICE_INFO,
WAVE_ENHANCE_DEVICE_INFO,
WAVE_ENHANCE_SERVICE_INFO,
WAVE_SERVICE_INFO,
patch_airthings_ble,
patch_async_ble_device_from_address,
)

from tests.common import MockConfigEntry, async_fire_time_changed
from tests.components.bluetooth import inject_bluetooth_service_info

@pytest.mark.parametrize(
("service_info", "device_info"),
[
(WAVE_SERVICE_INFO, WAVE_DEVICE_INFO),
(WAVE_ENHANCE_SERVICE_INFO, WAVE_ENHANCE_DEVICE_INFO),
(CORENTIUM_HOME_2_SERVICE_INFO, CORENTIUM_HOME_2_DEVICE_INFO),
],
)
async def test_migration_existing_entries(
hass: HomeAssistant,
service_info,
device_info,
) -> None:
"""Test migration of existing config entry without device model."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=service_info.address,
data={},
)
entry.add_to_hass(hass)

inject_bluetooth_service_info(hass, service_info)

assert DEVICE_MODEL not in entry.data

with (
patch_async_ble_device_from_address(service_info.device),
patch_airthings_ble(device_info),
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()

# Migration should have added device_model to entry data
assert DEVICE_MODEL in entry.data
assert entry.data[DEVICE_MODEL] == device_info.model.value

async def test_no_migration_when_device_model_exists(
hass: HomeAssistant,
) -> None:
"""Test that migration does not run when device_model already exists."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=WAVE_SERVICE_INFO.address,
data={DEVICE_MODEL: WAVE_DEVICE_INFO.model.value},
)
entry.add_to_hass(hass)

inject_bluetooth_service_info(hass, WAVE_SERVICE_INFO)

with (
patch_async_ble_device_from_address(WAVE_SERVICE_INFO.device),
patch_airthings_ble(WAVE_DEVICE_INFO) as mock_update,
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()

# Should have only 1 call for initial refresh (no migration call)
assert mock_update.call_count == 1
assert entry.data[DEVICE_MODEL] == WAVE_DEVICE_INFO.model.value

async def test_scan_interval_corentium_home_2(
hass: HomeAssistant, freezer: FrozenDateTimeFactory
) -> None:
"""Test that coordinator uses radon scan interval for Corentium Home 2."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=WAVE_SERVICE_INFO.address,
data={DEVICE_MODEL: CORENTIUM_HOME_2_DEVICE_INFO.model.value},
)
entry.add_to_hass(hass)

inject_bluetooth_service_info(hass, WAVE_SERVICE_INFO)

with (
patch_async_ble_device_from_address(WAVE_SERVICE_INFO.device),
patch_airthings_ble(CORENTIUM_HOME_2_DEVICE_INFO),
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()

assert (
hass.states.get("sensor.airthings_corentium_home_2_123456_battery").state
== "90"
)

changed_info = deepcopy(CORENTIUM_HOME_2_DEVICE_INFO)
changed_info.sensors["battery"] = 89

with patch_airthings_ble(changed_info):
freezer.tick(DEFAULT_SCAN_INTERVAL)
async_fire_time_changed(hass)
await hass.async_block_till_done()

assert (
hass.states.get("sensor.airthings_corentium_home_2_123456_battery").state
== "90"
)

freezer.tick(
DEVICE_SPECIFIC_SCAN_INTERVAL.get(
AirthingsDeviceType.CORENTIUM_HOME_2.value
)
- DEFAULT_SCAN_INTERVAL
)
async_fire_time_changed(hass)
await hass.async_block_till_done()

assert (
hass.states.get("sensor.airthings_corentium_home_2_123456_battery").state
== "89"
)

@pytest.mark.parametrize(
("service_info", "device_info", "battery_entity_id"),
[
(WAVE_SERVICE_INFO, WAVE_DEVICE_INFO, "sensor.airthings_wave_123456_battery"),
(
WAVE_ENHANCE_SERVICE_INFO,
WAVE_ENHANCE_DEVICE_INFO,
"sensor.airthings_wave_enhance_123456_battery",
),
],
)
async def test_coordinator_default_scan_interval(
hass: HomeAssistant,
service_info,
device_info,
freezer: FrozenDateTimeFactory,
battery_entity_id: str,
) -> None:
"""Test that coordinator uses default scan interval."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=service_info.address,
data={DEVICE_MODEL: device_info.model.value},
)
entry.add_to_hass(hass)

inject_bluetooth_service_info(hass, service_info)

with (
patch_async_ble_device_from_address(service_info.device),
patch_airthings_ble(device_info),
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()

assert hass.states.get(battery_entity_id).state == "85"

changed_info = deepcopy(device_info)
changed_info.sensors["battery"] = 84

with patch_airthings_ble(changed_info):
freezer.tick(DEFAULT_SCAN_INTERVAL)
async_fire_time_changed(hass)
await hass.async_block_till_done()

assert hass.states.get(battery_entity_id).state == "84"
@@ -1,17 +1,10 @@
"""Test the Airthings Wave sensor."""

from datetime import timedelta
import logging

from freezegun.api import FrozenDateTimeFactory
import pytest

from homeassistant.components.airthings_ble.const import (
DEFAULT_SCAN_INTERVAL,
DEVICE_MODEL,
DEVICE_SPECIFIC_SCAN_INTERVAL,
DOMAIN,
)
from homeassistant.components.airthings_ble.const import DOMAIN
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
@@ -19,7 +12,6 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er
from . import (
CO2_V1,
CO2_V2,
CORENTIUM_HOME_2_DEVICE_INFO,
HUMIDITY_V2,
TEMPERATURE_V1,
VOC_V1,
@@ -29,8 +21,6 @@ from . import (
WAVE_ENHANCE_DEVICE_INFO,
WAVE_ENHANCE_SERVICE_INFO,
WAVE_SERVICE_INFO,
AirthingsDevice,
BluetoothServiceInfoBleak,
create_device,
create_entry,
patch_airthings_ble,
@@ -39,7 +29,6 @@ from . import (
patch_async_discovered_service_info,
)

from tests.common import MockConfigEntry, async_fire_time_changed
from tests.components.bluetooth import inject_bluetooth_service_info

_LOGGER = logging.getLogger(__name__)
@@ -278,102 +267,3 @@ async def test_translation_keys(

expected_name = f"Airthings Wave Enhance (123456) {expected_sensor_name}"
assert state.attributes.get("friendly_name") == expected_name

async def test_scan_interval_migration_corentium_home_2(
hass: HomeAssistant,
freezer: FrozenDateTimeFactory,
) -> None:
"""Test that radon device migration uses 30-minute scan interval."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=WAVE_SERVICE_INFO.address,
data={},
)
entry.add_to_hass(hass)

inject_bluetooth_service_info(hass, WAVE_SERVICE_INFO)

with (
patch_async_ble_device_from_address(WAVE_SERVICE_INFO.device),
patch_airthings_ble(CORENTIUM_HOME_2_DEVICE_INFO) as mock_update,
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()

# Migration should have added device_model to entry data
assert DEVICE_MODEL in entry.data
assert entry.data[DEVICE_MODEL] == CORENTIUM_HOME_2_DEVICE_INFO.model.value

# Coordinator should have been configured with radon scan interval
coordinator = entry.runtime_data
assert coordinator.update_interval == timedelta(
seconds=DEVICE_SPECIFIC_SCAN_INTERVAL.get(
CORENTIUM_HOME_2_DEVICE_INFO.model.value
)
)

# Should have 2 calls: 1 for migration + 1 for initial refresh
assert mock_update.call_count == 2

# Fast forward by default interval (300s) - should NOT trigger update
freezer.tick(DEFAULT_SCAN_INTERVAL)
async_fire_time_changed(hass)
await hass.async_block_till_done()
assert mock_update.call_count == 2

# Fast forward to radon interval (1800s) - should trigger update
freezer.tick(
DEVICE_SPECIFIC_SCAN_INTERVAL.get(CORENTIUM_HOME_2_DEVICE_INFO.model.value)
)
async_fire_time_changed(hass)
await hass.async_block_till_done()
assert mock_update.call_count == 3

@pytest.mark.parametrize(
("service_info", "device_info"),
[
(WAVE_SERVICE_INFO, WAVE_DEVICE_INFO),
(WAVE_ENHANCE_SERVICE_INFO, WAVE_ENHANCE_DEVICE_INFO),
],
)
async def test_default_scan_interval_migration(
hass: HomeAssistant,
freezer: FrozenDateTimeFactory,
service_info: BluetoothServiceInfoBleak,
device_info: AirthingsDevice,
) -> None:
"""Test that non-radon device migration uses default 5-minute scan interval."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=service_info.address,
data={},
)
entry.add_to_hass(hass)

inject_bluetooth_service_info(hass, service_info)

with (
patch_async_ble_device_from_address(service_info.device),
patch_airthings_ble(device_info) as mock_update,
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()

# Migration should have added device_model to entry data
assert DEVICE_MODEL in entry.data
assert entry.data[DEVICE_MODEL] == device_info.model.value

# Coordinator should have been configured with default scan interval
coordinator = entry.runtime_data
assert coordinator.update_interval == timedelta(seconds=DEFAULT_SCAN_INTERVAL)

# Should have 2 calls: 1 for migration + 1 for initial refresh
assert mock_update.call_count == 2

# Fast forward by default interval (300s) - SHOULD trigger update
freezer.tick(DEFAULT_SCAN_INTERVAL)
async_fire_time_changed(hass)
await hass.async_block_till_done()
assert mock_update.call_count == 3
@@ -4,7 +4,7 @@ from collections.abc import Generator
from copy import deepcopy
from unittest.mock import AsyncMock, patch

from aioamazondevices.const.devices import DEVICE_TYPE_TO_MODEL
from aioamazondevices.const import DEVICE_TYPE_TO_MODEL
import pytest

from homeassistant.components.alexa_devices.const import (

@@ -2,12 +2,12 @@

from datetime import UTC, datetime

from aioamazondevices.const.schedules import (
from aioamazondevices.api import AmazonDevice, AmazonDeviceSensor, AmazonSchedule
from aioamazondevices.const import (
NOTIFICATION_ALARM,
NOTIFICATION_REMINDER,
NOTIFICATION_TIMER,
)
from aioamazondevices.structures import AmazonDevice, AmazonDeviceSensor, AmazonSchedule

TEST_CODE = "023123"
TEST_PASSWORD = "fake_password"
@@ -14,20 +14,12 @@
'online': True,
'sensors': dict({
'dnd': dict({
'error': False,
'error_msg': None,
'error_type': None,
'name': 'dnd',
'scale': None,
'value': False,
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='dnd', value=False, error=False, error_type=None, error_msg=None, scale=None)",
}),
'temperature': dict({
'error': False,
'error_msg': None,
'error_type': None,
'name': 'temperature',
'scale': 'CELSIUS',
'value': '22.5',
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='temperature', value='22.5', error=False, error_type=None, error_msg=None, scale='CELSIUS')",
}),
}),
'serial number': 'echo_test_serial_number',
@@ -52,20 +44,12 @@
'online': True,
'sensors': dict({
'dnd': dict({
'error': False,
'error_msg': None,
'error_type': None,
'name': 'dnd',
'scale': None,
'value': False,
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='dnd', value=False, error=False, error_type=None, error_msg=None, scale=None)",
}),
'temperature': dict({
'error': False,
'error_msg': None,
'error_type': None,
'name': 'temperature',
'scale': 'CELSIUS',
'value': '22.5',
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='temperature', value='22.5', error=False, error_type=None, error_msg=None, scale='CELSIUS')",
}),
}),
'serial number': 'echo_test_serial_number',
@@ -2,7 +2,7 @@

from unittest.mock import AsyncMock

from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY, SPEAKER_GROUP_MODEL
from aioamazondevices.const import SPEAKER_GROUP_FAMILY, SPEAKER_GROUP_MODEL
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
import pytest
@@ -11,11 +11,11 @@ from mozart_api.models import (
ListeningMode,
ListeningModeFeatures,
ListeningModeRef,
ListeningModeTrigger,
PlaybackContentMetadata,
PlaybackProgress,
PlaybackState,
PlayQueueSettings,
PowerLinkTrigger,
ProductState,
RemoteMenuItem,
RenderingState,
@@ -334,19 +334,19 @@ def mock_mozart_client() -> Generator[AsyncMock]:
id=TEST_SOUND_MODE,
name=TEST_SOUND_MODE_NAME,
features=ListeningModeFeatures(),
triggers=[PowerLinkTrigger()],
triggers=[ListeningModeTrigger()],
),
ListeningMode(
id=TEST_SOUND_MODE_2,
name=TEST_SOUND_MODE_NAME,
features=ListeningModeFeatures(),
triggers=[PowerLinkTrigger()],
triggers=[ListeningModeTrigger()],
),
ListeningMode(
id=345,
name=f"{TEST_SOUND_MODE_NAME} 2",
features=ListeningModeFeatures(),
triggers=[PowerLinkTrigger()],
triggers=[ListeningModeTrigger()],
),
]
client.get_active_listening_mode = AsyncMock()
@@ -31,19 +31,16 @@ from homeassistant.components.bluetooth.passive_update_processor import (
PassiveBluetoothEntityKey,
PassiveBluetoothProcessorCoordinator,
PassiveBluetoothProcessorEntity,
deserialize_entity_description,
)
from homeassistant.components.sensor import (
DOMAIN as SENSOR_DOMAIN,
SensorDeviceClass,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import current_entry
from homeassistant.const import UnitOfTemperature
from homeassistant.core import CoreState, HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.typing import UNDEFINED
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
@@ -1923,55 +1920,3 @@ async def test_naming(hass: HomeAssistant) -> None:
assert sensor_entity.translation_key is None

cancel_coordinator()

@pytest.mark.parametrize(
("description_type", "description_dict", "expected_description"),
[
(
SensorEntityDescription,
{
"key": "humidity",
"native_unit_of_measurement": "%",
"device_class": "humidity",
"state_class": "measurement",
},
SensorEntityDescription(
key="humidity",
native_unit_of_measurement="%",
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
),
),
(
BinarySensorEntityDescription,
{
"key": "motion",
"device_class": "motion",
},
BinarySensorEntityDescription(
key="motion",
device_class=BinarySensorDeviceClass.MOTION,
),
),
(
SensorEntityDescription,
{
"key": "temperature",
"name": None,
},
SensorEntityDescription(
key="temperature",
name=None,
),
),
],
)
def test_deserialize_entity_description(
description_type: type[EntityDescription],
description_dict: dict[str, Any],
expected_description: EntityDescription,
) -> None:
"""Test deserializing an entity description."""
description = deserialize_entity_description(description_type, description_dict)
assert description == expected_description
@@ -133,8 +133,8 @@ _LOGGER = logging.getLogger(__name__)
None,
[
{
"sensor_entity": "sensor.test_device_18b2_weight",
"friendly_name": "Test Device 18B2 Weight",
"sensor_entity": "sensor.test_device_18b2_mass",
"friendly_name": "Test Device 18B2 Mass",
"unit_of_measurement": "kg",
"state_class": "measurement",
"expected_state": "80.3",
@@ -150,8 +150,8 @@ _LOGGER = logging.getLogger(__name__)
None,
[
{
"sensor_entity": "sensor.test_device_18b2_weight",
"friendly_name": "Test Device 18B2 Weight",
"sensor_entity": "sensor.test_device_18b2_mass",
"friendly_name": "Test Device 18B2 Mass",
"unit_of_measurement": "lb",
"state_class": "measurement",
"expected_state": "74.86",
@@ -168,7 +168,7 @@ _LOGGER = logging.getLogger(__name__)
[
{
"sensor_entity": "sensor.test_device_18b2_dew_point",
"friendly_name": "Test Device 18B2 Dew point",
"friendly_name": "Test Device 18B2 Dew Point",
"unit_of_measurement": "°C",
"state_class": "measurement",
"expected_state": "17.38",
@@ -252,14 +252,14 @@ _LOGGER = logging.getLogger(__name__)
[
{
"sensor_entity": "sensor.test_device_18b2_pm10",
"friendly_name": "Test Device 18B2 PM10",
"friendly_name": "Test Device 18B2 Pm10",
"unit_of_measurement": "μg/m³",
"state_class": "measurement",
"expected_state": "7170",
},
{
"sensor_entity": "sensor.test_device_18b2_pm2_5",
"friendly_name": "Test Device 18B2 PM2.5",
"sensor_entity": "sensor.test_device_18b2_pm25",
"friendly_name": "Test Device 18B2 Pm25",
"unit_of_measurement": "μg/m³",
"state_class": "measurement",
"expected_state": "3090",
@@ -276,7 +276,7 @@ _LOGGER = logging.getLogger(__name__)
[
{
"sensor_entity": "sensor.test_device_18b2_carbon_dioxide",
"friendly_name": "Test Device 18B2 Carbon dioxide",
"friendly_name": "Test Device 18B2 Carbon Dioxide",
"unit_of_measurement": "ppm",
"state_class": "measurement",
"expected_state": "1250",
@@ -295,7 +295,7 @@ _LOGGER = logging.getLogger(__name__)
"sensor_entity": (
"sensor.test_device_18b2_volatile_organic_compounds"
),
"friendly_name": "Test Device 18B2 Volatile organic compounds",
"friendly_name": "Test Device 18B2 Volatile Organic Compounds",
"unit_of_measurement": "μg/m³",
"state_class": "measurement",
"expected_state": "307",
@@ -487,8 +487,8 @@ async def test_v1_sensors(
None,
[
{
"sensor_entity": "sensor.test_device_18b2_weight",
"friendly_name": "Test Device 18B2 Weight",
"sensor_entity": "sensor.test_device_18b2_mass",
"friendly_name": "Test Device 18B2 Mass",
"unit_of_measurement": "kg",
"state_class": "measurement",
"expected_state": "80.3",
@@ -504,8 +504,8 @@ async def test_v1_sensors(
None,
[
{
"sensor_entity": "sensor.test_device_18b2_weight",
"friendly_name": "Test Device 18B2 Weight",
"sensor_entity": "sensor.test_device_18b2_mass",
"friendly_name": "Test Device 18B2 Mass",
"unit_of_measurement": "lb",
"state_class": "measurement",
"expected_state": "74.86",
@@ -522,7 +522,7 @@ async def test_v1_sensors(
[
{
"sensor_entity": "sensor.test_device_18b2_dew_point",
"friendly_name": "Test Device 18B2 Dew point",
"friendly_name": "Test Device 18B2 Dew Point",
"unit_of_measurement": "°C",
"state_class": "measurement",
"expected_state": "17.38",
@@ -606,14 +606,14 @@ async def test_v1_sensors(
[
{
"sensor_entity": "sensor.test_device_18b2_pm10",
"friendly_name": "Test Device 18B2 PM10",
"friendly_name": "Test Device 18B2 Pm10",
"unit_of_measurement": "μg/m³",
"state_class": "measurement",
"expected_state": "7170",
},
{
"sensor_entity": "sensor.test_device_18b2_pm2_5",
"friendly_name": "Test Device 18B2 PM2.5",
"sensor_entity": "sensor.test_device_18b2_pm25",
"friendly_name": "Test Device 18B2 Pm25",
"unit_of_measurement": "μg/m³",
"state_class": "measurement",
"expected_state": "3090",
@@ -630,7 +630,7 @@ async def test_v1_sensors(
[
{
"sensor_entity": "sensor.test_device_18b2_carbon_dioxide",
"friendly_name": "Test Device 18B2 Carbon dioxide",
"friendly_name": "Test Device 18B2 Carbon Dioxide",
"unit_of_measurement": "ppm",
"state_class": "measurement",
"expected_state": "1250",
@@ -649,7 +649,7 @@ async def test_v1_sensors(
"sensor_entity": (
"sensor.test_device_18b2_volatile_organic_compounds"
),
"friendly_name": "Test Device 18B2 Volatile organic compounds",
"friendly_name": "Test Device 18B2 Volatile Organic Compounds",
"unit_of_measurement": "μg/m³",
"state_class": "measurement",
"expected_state": "307",
@@ -802,7 +802,7 @@ async def test_v1_sensors(
[
{
"sensor_entity": "sensor.test_device_18b2_uv_index",
"friendly_name": "Test Device 18B2 UV Index",
"friendly_name": "Test Device 18B2 Uv Index",
"state_class": "measurement",
"expected_state": "5.0",
},
@@ -852,7 +852,7 @@ async def test_v1_sensors(
[
{
"sensor_entity": "sensor.test_device_18b2_volume_flow_rate",
"friendly_name": "Test Device 18B2 Volume flow rate",
"friendly_name": "Test Device 18B2 Volume Flow Rate",
"unit_of_measurement": "m³/h",
"state_class": "measurement",
"expected_state": "34.78",
@@ -982,8 +982,8 @@ async def test_v1_sensors(
None,
[
{
"sensor_entity": "sensor.test_device_18b2_stored_volume",
"friendly_name": "Test Device 18B2 Stored volume",
"sensor_entity": "sensor.test_device_18b2_volume_storage",
"friendly_name": "Test Device 18B2 Volume Storage",
"unit_of_measurement": "L",
"state_class": "measurement",
"expected_state": "19551.879",
@@ -999,15 +999,15 @@ async def test_v1_sensors(
None,
[
{
"sensor_entity": "sensor.test_device_18b2_temperature",
"friendly_name": "Test Device 18B2 Temperature",
"sensor_entity": "sensor.test_device_18b2_temperature_1",
"friendly_name": "Test Device 18B2 Temperature 1",
"unit_of_measurement": "°C",
"state_class": "measurement",
"expected_state": "25.06",
},
{
"sensor_entity": "sensor.test_device_18b2_temperature_2",
"friendly_name": "Test Device 18B2 Temperature",
"friendly_name": "Test Device 18B2 Temperature 2",
"unit_of_measurement": "°C",
"state_class": "measurement",
"expected_state": "25.11",
@@ -1023,36 +1023,36 @@ async def test_v1_sensors(
None,
[
{
"sensor_entity": "sensor.test_device_18b2_temperature",
"friendly_name": "Test Device 18B2 Temperature",
"sensor_entity": "sensor.test_device_18b2_temperature_1",
"friendly_name": "Test Device 18B2 Temperature 1",
"unit_of_measurement": "°C",
"state_class": "measurement",
"expected_state": "25.06",
},
{
"sensor_entity": "sensor.test_device_18b2_temperature_2",
"friendly_name": "Test Device 18B2 Temperature",
"friendly_name": "Test Device 18B2 Temperature 2",
"unit_of_measurement": "°C",
"state_class": "measurement",
"expected_state": "25.11",
},
{
"sensor_entity": "sensor.test_device_18b2_temperature_3",
"friendly_name": "Test Device 18B2 Temperature",
"friendly_name": "Test Device 18B2 Temperature 3",
"unit_of_measurement": "°C",
"state_class": "measurement",
"expected_state": "22.55",
},
{
"sensor_entity": "sensor.test_device_18b2_humidity",
"friendly_name": "Test Device 18B2 Humidity",
"sensor_entity": "sensor.test_device_18b2_humidity_1",
"friendly_name": "Test Device 18B2 Humidity 1",
"unit_of_measurement": "%",
"state_class": "measurement",
"expected_state": "63.27",
},
{
"sensor_entity": "sensor.test_device_18b2_humidity_2",
"friendly_name": "Test Device 18B2 Humidity",
"friendly_name": "Test Device 18B2 Humidity 2",
"unit_of_measurement": "%",
"state_class": "measurement",
"expected_state": "60.71",
@@ -47,19 +47,7 @@ async def test_setup_entry_maintenance(

async def test_setup_gateway_offline(hass: HomeAssistant) -> None:
"""Test setup entry with one gateway online and one gateway offline."""
entry = configure_integration(hass)
test_gateway = HomeControlMock()
with patch(
"homeassistant.components.devolo_home_control.HomeControl",
side_effect=[test_gateway, GatewayOfflineError],
):
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state is ConfigEntryState.LOADED

async def test_setup_all_gateways_offline(hass: HomeAssistant) -> None:
"""Test setup entry fails on all gateways offline."""
"""Test setup entry fails on gateway offline."""
entry = configure_integration(hass)
with patch(
"homeassistant.components.devolo_home_control.HomeControl",
@@ -79,7 +79,7 @@ def setup_mock_foscam_camera(mock_foscam_camera):
0,
{
"swCapabilities1": "100",
"swCapabilities2": "896",
"swCapabilities2": "768",
"swCapabilities3": "100",
"swCapabilities4": "100",
},
@@ -72,12 +72,6 @@ MOCK_FB_SERVICES: dict[str, dict] = {
"NewBytesReceived": 12045,
},
},
"LANConfigSecurity1": {
"X_AVM-DE_GetCurrentUser": {
"NewX_AVM-DE_CurrentUsername": "fake_user",
"NewX_AVM-DE_CurrentUserRights": "<rights><path>BoxAdmin</path><access>readwrite</access><path>Phone</path><access>readwrite</access><path>Dial</path><access>readwrite</access><path>NAS</path><access>none</access><path>HomeAuto</path><access>readwrite</access><path>App</path><access>readwrite</access></rights>",
}
},
"Layer3Forwarding1": {
"GetDefaultConnectionService": {
"NewDefaultConnectionService": "1.WANPPPConnection.1"