Mirror of https://github.com/home-assistant/core.git
Synced 2025-11-07 01:50:18 +00:00

Compare commits: dev ... fix_progre (2 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 34aff535fd |  |
|  | 3b0f49e2ea |  |
@@ -6,8 +6,8 @@ from collections.abc import Callable
from dataclasses import dataclass
from typing import Final

from aioamazondevices.const.metadata import SENSOR_STATE_OFF
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SENSOR_STATE_OFF

from homeassistant.components.binary_sensor import (
    DOMAIN as BINARY_SENSOR_DOMAIN,
@@ -2,13 +2,12 @@

from datetime import timedelta

from aioamazondevices.api import AmazonEchoApi
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
from aioamazondevices.exceptions import (
    CannotAuthenticate,
    CannotConnect,
    CannotRetrieveData,
)
from aioamazondevices.structures import AmazonDevice
from aiohttp import ClientSession

from homeassistant.config_entries import ConfigEntry
@@ -4,7 +4,7 @@ from __future__ import annotations

from typing import Any

from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME
@@ -1,7 +1,7 @@
"""Defines a base Alexa Devices entity."""

from aioamazondevices.const.devices import SPEAKER_GROUP_MODEL
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SPEAKER_GROUP_MODEL

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["aioamazondevices"],
  "quality_scale": "platinum",
  "requirements": ["aioamazondevices==8.0.1"]
  "requirements": ["aioamazondevices==6.5.6"]
}
@@ -6,9 +6,8 @@ from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any, Final

from aioamazondevices.api import AmazonEchoApi
from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
from aioamazondevices.const import SPEAKER_GROUP_FAMILY

from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
from homeassistant.core import HomeAssistant
@@ -7,12 +7,12 @@ from dataclasses import dataclass
from datetime import datetime
from typing import Final

from aioamazondevices.const.schedules import (
from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import (
    NOTIFICATION_ALARM,
    NOTIFICATION_REMINDER,
    NOTIFICATION_TIMER,
)
from aioamazondevices.structures import AmazonDevice

from homeassistant.components.sensor import (
    SensorDeviceClass,
@@ -1,6 +1,6 @@
"""Support for services."""

from aioamazondevices.const.sounds import SOUNDS_LIST
from aioamazondevices.sounds import SOUNDS_LIST
import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState
@@ -6,7 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Final

from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice

from homeassistant.components.switch import (
    DOMAIN as SWITCH_DOMAIN,
@@ -4,7 +4,7 @@ from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate

from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
from aioamazondevices.const import SPEAKER_GROUP_FAMILY
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
@@ -63,7 +63,6 @@ BINARY_SENSOR_DESCRIPTIONS = {
    ),
    BTHomeBinarySensorDeviceClass.GENERIC: BinarySensorEntityDescription(
        key=BTHomeBinarySensorDeviceClass.GENERIC,
        translation_key="generic",
    ),
    BTHomeBinarySensorDeviceClass.LIGHT: BinarySensorEntityDescription(
        key=BTHomeBinarySensorDeviceClass.LIGHT,
@@ -160,7 +159,10 @@ def sensor_update_to_bluetooth_data_update(
            device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
            for device_key, sensor_values in sensor_update.binary_entity_values.items()
        },
        entity_names={},
        entity_names={
            device_key_to_bluetooth_entity_key(device_key): sensor_values.name
            for device_key, sensor_values in sensor_update.binary_entity_values.items()
        },
    )
@@ -47,11 +47,6 @@
    }
  },
  "entity": {
    "binary_sensor": {
      "generic": {
        "name": "Generic"
      }
    },
    "event": {
      "button": {
        "state_attributes": {
@@ -359,7 +359,7 @@ CLIMATE_KNX_SCHEMA = vol.Schema(
            write=False, state_required=True, valid_dpt="9.001"
        ),
        vol.Optional(CONF_GA_HUMIDITY_CURRENT): GASelector(
            write=False, valid_dpt="9.007"
            write=False, valid_dpt="9.002"
        ),
        vol.Required(CONF_TARGET_TEMPERATURE): GroupSelect(
            GroupSelectOption(
@@ -221,7 +221,7 @@ async def library_payload(hass):
    for child in library_info.children:
        child.thumbnail = "https://brands.home-assistant.io/_/kodi/logo.png"

    with contextlib.suppress(BrowseError):
    with contextlib.suppress(media_source.BrowseError):
        item = await media_source.async_browse_media(
            hass, None, content_filter=media_source_content_filter
        )
@@ -41,9 +41,6 @@
      "energy_forecast": {
        "default": "mdi:lightning-bolt-outline"
      },
      "finish": {
        "default": "mdi:clock-end"
      },
      "plate": {
        "default": "mdi:circle-outline",
        "state": {
@@ -86,9 +83,6 @@
      "spin_speed": {
        "default": "mdi:sync"
      },
      "start": {
        "default": "mdi:clock-start"
      },
      "start_time": {
        "default": "mdi:clock-start"
      },
@@ -4,7 +4,6 @@ from __future__ import annotations

from collections.abc import Callable, Mapping
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
from typing import Any, Final, cast

@@ -30,7 +29,6 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.util import dt as dt_util

from .const import (
    COFFEE_SYSTEM_PROFILE,
@@ -104,47 +102,12 @@ def _get_coffee_profile(value: MieleDevice) -> str | None:
    return None


def _convert_start_timestamp(
    elapsed_time_list: list[int], start_time_list: list[int]
) -> datetime | None:
    """Convert raw values representing time into start timestamp."""
    now = dt_util.utcnow()
    elapsed_duration = _convert_duration(elapsed_time_list)
    delayed_start_duration = _convert_duration(start_time_list)
    if (elapsed_duration is None or elapsed_duration == 0) and (
        delayed_start_duration is None or delayed_start_duration == 0
    ):
        return None
    if elapsed_duration is not None and elapsed_duration > 0:
        duration = -elapsed_duration
    elif delayed_start_duration is not None and delayed_start_duration > 0:
        duration = delayed_start_duration
    delta = timedelta(minutes=duration)
    return (now + delta).replace(second=0, microsecond=0)


def _convert_finish_timestamp(
    remaining_time_list: list[int], start_time_list: list[int]
) -> datetime | None:
    """Convert raw values representing time into finish timestamp."""
    now = dt_util.utcnow()
    program_duration = _convert_duration(remaining_time_list)
    delayed_start_duration = _convert_duration(start_time_list)
    if program_duration is None or program_duration == 0:
        return None
    duration = program_duration + (
        delayed_start_duration if delayed_start_duration is not None else 0
    )
    delta = timedelta(minutes=duration)
    return (now + delta).replace(second=0, microsecond=0)

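For reference, a minimal standalone sketch of the timestamp math in these two helpers. `_convert_duration` is not shown in this hunk; the `[hours, minutes]` shape assumed here is an assumption, as are the helper names.

```python
from datetime import datetime, timedelta, timezone


def _convert_duration(time_list: list[int]) -> int | None:
    """Assumed shape: durations reported as [hours, minutes] -> total minutes."""
    return time_list[0] * 60 + time_list[1] if time_list else None


def finish_timestamp(remaining: list[int], start: list[int]) -> datetime | None:
    """Same math as _convert_finish_timestamp: now + remaining + delayed start, minute precision."""
    program = _convert_duration(remaining)
    delay = _convert_duration(start) or 0
    if not program:
        return None
    now = datetime.now(timezone.utc)
    return (now + timedelta(minutes=program + delay)).replace(second=0, microsecond=0)


# 90 minutes remaining plus a 30 minute delayed start -> roughly two hours from now.
print(finish_timestamp([1, 30], [0, 30]))
```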
@dataclass(frozen=True, kw_only=True)
class MieleSensorDescription(SensorEntityDescription):
    """Class describing Miele sensor entities."""

    value_fn: Callable[[MieleDevice], StateType | datetime]
    end_value_fn: Callable[[StateType | datetime], StateType | datetime] | None = None
    value_fn: Callable[[MieleDevice], StateType]
    end_value_fn: Callable[[StateType], StateType] | None = None
    extra_attributes: dict[str, Callable[[MieleDevice], StateType]] | None = None
    zone: int | None = None
    unique_id_fn: Callable[[str, MieleSensorDescription], str] | None = None
@@ -465,60 +428,6 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
            suggested_unit_of_measurement=UnitOfTime.HOURS,
        ),
    ),
    MieleSensorDefinition(
        types=(
            MieleAppliance.WASHING_MACHINE,
            MieleAppliance.WASHING_MACHINE_SEMI_PROFESSIONAL,
            MieleAppliance.TUMBLE_DRYER,
            MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL,
            MieleAppliance.DISHWASHER,
            MieleAppliance.OVEN,
            MieleAppliance.OVEN_MICROWAVE,
            MieleAppliance.STEAM_OVEN,
            MieleAppliance.MICROWAVE,
            MieleAppliance.ROBOT_VACUUM_CLEANER,
            MieleAppliance.WASHER_DRYER,
            MieleAppliance.STEAM_OVEN_COMBI,
            MieleAppliance.STEAM_OVEN_MICRO,
            MieleAppliance.DIALOG_OVEN,
            MieleAppliance.STEAM_OVEN_MK2,
        ),
        description=MieleSensorDescription(
            key="state_finish_timestamp",
            translation_key="finish",
            value_fn=lambda value: _convert_finish_timestamp(
                value.state_remaining_time, value.state_start_time
            ),
            device_class=SensorDeviceClass.TIMESTAMP,
            entity_category=EntityCategory.DIAGNOSTIC,
        ),
    ),
    MieleSensorDefinition(
        types=(
            MieleAppliance.WASHING_MACHINE,
            MieleAppliance.TUMBLE_DRYER,
            MieleAppliance.DISHWASHER,
            MieleAppliance.OVEN,
            MieleAppliance.OVEN_MICROWAVE,
            MieleAppliance.STEAM_OVEN,
            MieleAppliance.MICROWAVE,
            MieleAppliance.WASHER_DRYER,
            MieleAppliance.STEAM_OVEN_COMBI,
            MieleAppliance.STEAM_OVEN_MICRO,
            MieleAppliance.DIALOG_OVEN,
            MieleAppliance.ROBOT_VACUUM_CLEANER,
            MieleAppliance.STEAM_OVEN_MK2,
        ),
        description=MieleSensorDescription(
            key="state_start_timestamp",
            translation_key="start",
            value_fn=lambda value: _convert_start_timestamp(
                value.state_elapsed_time, value.state_start_time
            ),
            device_class=SensorDeviceClass.TIMESTAMP,
            entity_category=EntityCategory.DIAGNOSTIC,
        ),
    ),
    MieleSensorDefinition(
        types=(
            MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL,
@@ -711,8 +620,6 @@ async def async_setup_entry(
            "state_elapsed_time": MieleTimeSensor,
            "state_remaining_time": MieleTimeSensor,
            "state_start_time": MieleTimeSensor,
            "state_start_timestamp": MieleAbsoluteTimeSensor,
            "state_finish_timestamp": MieleAbsoluteTimeSensor,
            "current_energy_consumption": MieleConsumptionSensor,
            "current_water_consumption": MieleConsumptionSensor,
        }.get(definition.description.key, MieleSensor)
@@ -836,7 +743,7 @@ class MieleSensor(MieleEntity, SensorEntity):
            self._attr_unique_id = description.unique_id_fn(device_id, description)

    @property
    def native_value(self) -> StateType | datetime:
    def native_value(self) -> StateType:
        """Return the state of the sensor."""
        return self.entity_description.value_fn(self.device)

@@ -854,7 +761,7 @@ class MieleSensor(MieleEntity, SensorEntity):
class MieleRestorableSensor(MieleSensor, RestoreSensor):
    """Representation of a Sensor whose internal state can be restored."""

    _attr_native_value: StateType | datetime
    _attr_native_value: StateType

    async def async_added_to_hass(self) -> None:
        """When entity is added to hass."""
@@ -866,7 +773,7 @@ class MieleRestorableSensor(MieleSensor, RestoreSensor):
            self._attr_native_value = last_data.native_value  # type: ignore[assignment]

    @property
    def native_value(self) -> StateType | datetime:
    def native_value(self) -> StateType:
        """Return the state of the sensor.

        It is necessary to override `native_value` to fall back to the default
@@ -1027,40 +934,6 @@ class MieleTimeSensor(MieleRestorableSensor):
            self._attr_native_value = current_value


class MieleAbsoluteTimeSensor(MieleRestorableSensor):
    """Representation of absolute time sensors handling precision correctness."""

    _previous_value: StateType | datetime = None

    def _update_native_value(self) -> None:
        """Update the last value of the sensor."""
        current_value = self.entity_description.value_fn(self.device)
        current_status = StateStatus(self.device.state_status)

        # The API reports with minute precision, to avoid changing
        # the value too often, we keep the cached value if it differs
        # less than 90s from the new value
        if (
            isinstance(self._previous_value, datetime)
            and isinstance(current_value, datetime)
            and (
                self._previous_value - timedelta(seconds=90)
                < current_value
                < self._previous_value + timedelta(seconds=90)
            )
        ) or current_status == StateStatus.PROGRAM_ENDED:
            return

        # force unknown when appliance is not working (some devices are keeping last value until a new cycle starts)
        if current_status in (StateStatus.OFF, StateStatus.ON, StateStatus.IDLE):
            self._attr_native_value = None

        # otherwise, cache value and return it
        else:
            self._attr_native_value = current_value
            self._previous_value = current_value


class MieleConsumptionSensor(MieleRestorableSensor):
    """Representation of consumption sensors keeping state from cache."""

@@ -1070,19 +943,13 @@ class MieleConsumptionSensor(MieleRestorableSensor):
        """Update the last value of the sensor."""
        current_value = self.entity_description.value_fn(self.device)
        current_status = StateStatus(self.device.state_status)
        # Guard for corrupt restored value
        restored_value = (
            self._attr_native_value
            if isinstance(self._attr_native_value, (int, float))
            else 0
        )
        last_value = (
            float(cast(str, restored_value))
            float(cast(str, self._attr_native_value))
            if self._attr_native_value is not None
            else 0
        )

        # Force unknown when appliance is not able to report consumption
        # force unknown when appliance is not able to report consumption
        if current_status in (
            StateStatus.ON,
            StateStatus.OFF,
@@ -216,9 +216,6 @@
      "energy_forecast": {
        "name": "Energy forecast"
      },
      "finish": {
        "name": "Finish"
      },
      "plate": {
        "name": "Plate {plate_no}",
        "state": {
@@ -1018,9 +1015,6 @@
      "spin_speed": {
        "name": "Spin speed"
      },
      "start": {
        "name": "Start"
      },
      "start_time": {
        "name": "Start in"
      },
@@ -1,11 +0,0 @@
"""Constants for the NOAA Tides integration."""

from datetime import timedelta

CONF_STATION_ID = "station_id"

DEFAULT_NAME = "NOAA Tides"
DEFAULT_PREDICTION_LENGTH = timedelta(days=2)
DEFAULT_TIMEZONE = "lst_ldt"

ATTRIBUTION = "Data provided by NOAA"
@@ -2,7 +2,7 @@

from __future__ import annotations

from datetime import datetime
from datetime import datetime, timedelta
import logging
from typing import TYPE_CHECKING, Any, Literal, TypedDict

@@ -22,13 +22,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util.unit_system import METRIC_SYSTEM

from .const import (
    ATTRIBUTION,
    CONF_STATION_ID,
    DEFAULT_NAME,
    DEFAULT_PREDICTION_LENGTH,
    DEFAULT_TIMEZONE,
)
from .helpers import get_station_unique_id

if TYPE_CHECKING:
@@ -36,6 +29,13 @@ if TYPE_CHECKING:

_LOGGER = logging.getLogger(__name__)

CONF_STATION_ID = "station_id"

DEFAULT_NAME = "NOAA Tides"
DEFAULT_TIMEZONE = "lst_ldt"

SCAN_INTERVAL = timedelta(minutes=60)

TIMEZONES = ["gmt", "lst", "lst_ldt"]
UNIT_SYSTEMS = ["english", "metric"]
@@ -63,9 +63,9 @@ def setup_platform(
    if CONF_UNIT_SYSTEM in config:
        unit_system = config[CONF_UNIT_SYSTEM]
    elif hass.config.units is METRIC_SYSTEM:
        unit_system = "metric"
        unit_system = UNIT_SYSTEMS[1]
    else:
        unit_system = "english"
        unit_system = UNIT_SYSTEMS[0]

    try:
        station = coops.Station(station_id, unit_system)
@@ -97,7 +97,7 @@ class NOAATidesData(TypedDict):
class NOAATidesAndCurrentsSensor(SensorEntity):
    """Representation of a NOAA Tides and Currents sensor."""

    _attr_attribution = ATTRIBUTION
    _attr_attribution = "Data provided by NOAA"

    def __init__(self, name, station_id, timezone, unit_system, station) -> None:
        """Initialize the sensor."""
@@ -141,8 +141,8 @@ class NOAATidesAndCurrentsSensor(SensorEntity):
        return attr

    @property
    def native_value(self) -> str | None:
        """Return the state."""
    def native_value(self):
        """Return the state of the device."""
        if self.data is None:
            return None
        api_time = self.data["time_stamp"][0]
@@ -157,7 +157,8 @@ class NOAATidesAndCurrentsSensor(SensorEntity):
    def update(self) -> None:
        """Get the latest data from NOAA Tides and Currents API."""
        begin = datetime.now()
        end = begin + DEFAULT_PREDICTION_LENGTH
        delta = timedelta(days=2)
        end = begin + delta
        try:
            df_predictions = self._station.get_data(
                begin_date=begin.strftime("%Y%m%d %H:%M"),
@@ -208,20 +208,6 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
        native_unit_of_measurement=UnitOfPower.WATT,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    OverkizSensorDescription(
        key=OverkizState.IO_POWER_HEAT_PUMP,
        name="Heat pump power consumption",
        device_class=SensorDeviceClass.POWER,
        native_unit_of_measurement=UnitOfPower.WATT,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    OverkizSensorDescription(
        key=OverkizState.IO_POWER_HEAT_ELECTRICAL,
        name="Electric power consumption",
        device_class=SensorDeviceClass.POWER,
        native_unit_of_measurement=UnitOfPower.WATT,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    OverkizSensorDescription(
        key=OverkizState.CORE_CONSUMPTION_TARIFF1,
        name="Consumption tariff 1",
@@ -45,8 +45,8 @@
        "name": "Fuel drying",
        "state": {
          "dry": "Dry",
          "extremely_dry": "Extremely dry",
          "moderate_wet": "Moderately wet",
          "extremely_dry": "Extemely dry",
          "moderate_wet": "Moderate wet",
          "very_dry": "Very dry",
          "very_wet": "Very wet",
          "wet": "Wet"
@@ -57,7 +57,4 @@ async def async_setup_entry(hass: HomeAssistant, entry: SolarEdgeConfigEntry) ->

async def async_unload_entry(hass: HomeAssistant, entry: SolarEdgeConfigEntry) -> bool:
    """Unload SolarEdge config entry."""
    if DATA_API_CLIENT not in entry.runtime_data:
        return True  # Nothing to unload

    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -133,11 +133,8 @@ class SolarEdgeConfigFlow(ConfigFlow, domain=DOMAIN):

        if api_key_ok and web_login_ok:
            data = {CONF_SITE_ID: site_id}
            if api_key:
                data[CONF_API_KEY] = api_key
            if username:
                data[CONF_USERNAME] = username
                data[CONF_PASSWORD] = web_auth[CONF_PASSWORD]
            data.update(api_auth)
            data.update(web_auth)

            if self.source == SOURCE_RECONFIGURE:
                if TYPE_CHECKING:
@@ -49,9 +49,7 @@ QUERY_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_COLUMN_NAME): cv.string,
        vol.Required(CONF_NAME): cv.template,
        vol.Required(CONF_QUERY): vol.All(
            cv.template, ValueTemplate.from_template, validate_sql_select
        ),
        vol.Required(CONF_QUERY): vol.All(cv.string, validate_sql_select),
        vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
        vol.Optional(CONF_VALUE_TEMPLATE): vol.All(
            cv.template, ValueTemplate.from_template
@@ -9,6 +9,8 @@ import sqlalchemy
from sqlalchemy.engine import Engine, Result
from sqlalchemy.exc import MultipleResultsFound, NoSuchColumnError, SQLAlchemyError
from sqlalchemy.orm import Session, scoped_session, sessionmaker
import sqlparse
from sqlparse.exceptions import SQLParseError
import voluptuous as vol

from homeassistant.components.recorder import CONF_DB_URL, get_instance
@@ -29,28 +31,21 @@ from homeassistant.const import (
    CONF_UNIT_OF_MEASUREMENT,
    CONF_VALUE_TEMPLATE,
)
from homeassistant.core import async_get_hass, callback
from homeassistant.core import callback
from homeassistant.data_entry_flow import section
from homeassistant.exceptions import TemplateError
from homeassistant.helpers import selector

from .const import CONF_ADVANCED_OPTIONS, CONF_COLUMN_NAME, CONF_QUERY, DOMAIN
from .util import (
    EmptyQueryError,
    InvalidSqlQuery,
    MultipleQueryError,
    NotSelectQueryError,
    UnknownQueryTypeError,
    check_and_render_sql_query,
    resolve_db_url,
)
from .util import resolve_db_url

_LOGGER = logging.getLogger(__name__)


OPTIONS_SCHEMA: vol.Schema = vol.Schema(
    {
        vol.Required(CONF_QUERY): selector.TemplateSelector(),
        vol.Required(CONF_QUERY): selector.TextSelector(
            selector.TextSelectorConfig(multiline=True)
        ),
        vol.Required(CONF_COLUMN_NAME): selector.TextSelector(),
        vol.Required(CONF_ADVANCED_OPTIONS): section(
            vol.Schema(
@@ -94,12 +89,14 @@ CONFIG_SCHEMA: vol.Schema = vol.Schema(

def validate_sql_select(value: str) -> str:
    """Validate that value is a SQL SELECT query."""
    hass = async_get_hass()
    try:
        return check_and_render_sql_query(hass, value)
    except (TemplateError, InvalidSqlQuery) as err:
        _LOGGER.debug("Invalid query '%s' results in '%s'", value, err.args[0])
        raise
    if len(query := sqlparse.parse(value.lstrip().lstrip(";"))) > 1:
        raise MultipleResultsFound
    if len(query) == 0 or (query_type := query[0].get_type()) == "UNKNOWN":
        raise ValueError
    if query_type != "SELECT":
        _LOGGER.debug("The SQL query %s is of type %s", query, query_type)
        raise SQLParseError
    return str(query[0])


def validate_db_connection(db_url: str) -> bool:
@@ -141,7 +138,7 @@ def validate_query(db_url: str, query: str, column: str) -> bool:
        if sess:
            sess.close()
        engine.dispose()
        raise InvalidSqlQuery from error
        raise ValueError(error) from error

    for res in result.mappings():
        if column not in res:
@@ -227,13 +224,13 @@ class SQLConfigFlow(ConfigFlow, domain=DOMAIN):
            except NoSuchColumnError:
                errors["column"] = "column_invalid"
                description_placeholders = {"column": column}
            except (MultipleResultsFound, MultipleQueryError):
            except MultipleResultsFound:
                errors["query"] = "multiple_queries"
            except SQLAlchemyError:
                errors["db_url"] = "db_url_invalid"
            except (NotSelectQueryError, UnknownQueryTypeError):
            except SQLParseError:
                errors["query"] = "query_no_read_only"
            except (TemplateError, EmptyQueryError, InvalidSqlQuery) as err:
            except ValueError as err:
                _LOGGER.debug("Invalid query: %s", err)
                errors["query"] = "query_invalid"

@@ -285,13 +282,13 @@ class SQLOptionsFlowHandler(OptionsFlowWithReload):
            except NoSuchColumnError:
                errors["column"] = "column_invalid"
                description_placeholders = {"column": column}
            except (MultipleResultsFound, MultipleQueryError):
            except MultipleResultsFound:
                errors["query"] = "multiple_queries"
            except SQLAlchemyError:
                errors["db_url"] = "db_url_invalid"
            except (NotSelectQueryError, UnknownQueryTypeError):
            except SQLParseError:
                errors["query"] = "query_no_read_only"
            except (TemplateError, EmptyQueryError, InvalidSqlQuery) as err:
            except ValueError as err:
                _LOGGER.debug("Invalid query: %s", err)
                errors["query"] = "query_invalid"
            else:
@@ -22,7 +22,7 @@ from homeassistant.const import (
    MATCH_ALL,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import PlatformNotReady, TemplateError
from homeassistant.exceptions import TemplateError
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import (
    AddConfigEntryEntitiesCallback,
@@ -40,9 +40,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .const import CONF_ADVANCED_OPTIONS, CONF_COLUMN_NAME, CONF_QUERY, DOMAIN
from .util import (
    InvalidSqlQuery,
    async_create_sessionmaker,
    check_and_render_sql_query,
    convert_value,
    generate_lambda_stmt,
    redact_credentials,
@@ -83,7 +81,7 @@ async def async_setup_platform(
        return

    name: Template = conf[CONF_NAME]
    query_template: ValueTemplate = conf[CONF_QUERY]
    query_str: str = conf[CONF_QUERY]
    value_template: ValueTemplate | None = conf.get(CONF_VALUE_TEMPLATE)
    column_name: str = conf[CONF_COLUMN_NAME]
    unique_id: str | None = conf.get(CONF_UNIQUE_ID)
@@ -98,7 +96,7 @@ async def async_setup_platform(
    await async_setup_sensor(
        hass,
        trigger_entity_config,
        query_template,
        query_str,
        column_name,
        value_template,
        unique_id,
@@ -121,13 +119,6 @@ async def async_setup_entry(
    template: str | None = entry.options[CONF_ADVANCED_OPTIONS].get(CONF_VALUE_TEMPLATE)
    column_name: str = entry.options[CONF_COLUMN_NAME]

    query_template: ValueTemplate | None = None
    try:
        query_template = ValueTemplate(query_str, hass)
        query_template.ensure_valid()
    except TemplateError as err:
        raise PlatformNotReady("Invalid SQL query template") from err

    value_template: ValueTemplate | None = None
    if template is not None:
        try:
@@ -146,7 +137,7 @@ async def async_setup_entry(
    await async_setup_sensor(
        hass,
        trigger_entity_config,
        query_template,
        query_str,
        column_name,
        value_template,
        entry.entry_id,
@@ -159,7 +150,7 @@ async def async_setup_entry(
async def async_setup_sensor(
    hass: HomeAssistant,
    trigger_entity_config: ConfigType,
    query_template: ValueTemplate,
    query_str: str,
    column_name: str,
    value_template: ValueTemplate | None,
    unique_id: str | None,
@@ -175,25 +166,22 @@ async def async_setup_sensor(
    ) = await async_create_sessionmaker(hass, db_url)
    if sessmaker is None:
        return
    validate_query(hass, query_template, uses_recorder_db, unique_id)
    validate_query(hass, query_str, uses_recorder_db, unique_id)

    query_str = check_and_render_sql_query(hass, query_template)
    upper_query = query_str.upper()
    # MSSQL uses TOP and not LIMIT
    mod_query_template = query_template
    if not ("LIMIT" in upper_query or "SELECT TOP" in upper_query):
        if "mssql" in db_url:
            _query = query_template.template.replace("SELECT", "SELECT TOP 1")
            query_str = upper_query.replace("SELECT", "SELECT TOP 1")
        else:
            _query = query_template.template.replace(";", "") + " LIMIT 1;"
        mod_query_template = ValueTemplate(_query, hass)
            query_str = query_str.replace(";", "") + " LIMIT 1;"

    async_add_entities(
        [
            SQLSensor(
                trigger_entity_config,
                sessmaker,
                mod_query_template,
                query_str,
                column_name,
                value_template,
                yaml,
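A minimal standalone sketch of the LIMIT/TOP rewrite shown in this hunk (the helper name is hypothetical; only the rewrite logic comes from the code above):

```python
def limit_to_one(query: str, db_url: str) -> str:
    """Cap an unbounded SELECT at a single row, using TOP for MSSQL and LIMIT elsewhere."""
    upper = query.upper()
    if "LIMIT" in upper or "SELECT TOP" in upper:
        return query
    if "mssql" in db_url:
        # MSSQL has no LIMIT clause; TOP 1 plays the same role.
        return query.replace("SELECT", "SELECT TOP 1")
    return query.replace(";", "") + " LIMIT 1;"


print(limit_to_one("SELECT value FROM states;", "sqlite:///home-assistant_v2.db"))
# SELECT value FROM states LIMIT 1;
```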
@@ -212,7 +200,7 @@ class SQLSensor(ManualTriggerSensorEntity):
        self,
        trigger_entity_config: ConfigType,
        sessmaker: scoped_session,
        query: ValueTemplate,
        query: str,
        column: str,
        value_template: ValueTemplate | None,
        yaml: bool,
@@ -226,6 +214,7 @@ class SQLSensor(ManualTriggerSensorEntity):
        self.sessionmaker = sessmaker
        self._attr_extra_state_attributes = {}
        self._use_database_executor = use_database_executor
        self._lambda_stmt = generate_lambda_stmt(query)
        if not yaml and (unique_id := trigger_entity_config.get(CONF_UNIQUE_ID)):
            self._attr_name = None
            self._attr_has_entity_name = True
@@ -266,22 +255,11 @@ class SQLSensor(ManualTriggerSensorEntity):
        self._attr_extra_state_attributes = {}
        sess: scoped_session = self.sessionmaker()
        try:
            rendered_query = check_and_render_sql_query(self.hass, self._query)
            _lambda_stmt = generate_lambda_stmt(rendered_query)
            result: Result = sess.execute(_lambda_stmt)
        except (TemplateError, InvalidSqlQuery) as err:
            _LOGGER.error(
                "Error rendering query %s: %s",
                redact_credentials(self._query.template),
                redact_credentials(str(err)),
            )
            sess.rollback()
            sess.close()
            return
            result: Result = sess.execute(self._lambda_stmt)
        except SQLAlchemyError as err:
            _LOGGER.error(
                "Error executing query %s: %s",
                rendered_query,
                self._query,
                redact_credentials(str(err)),
            )
            sess.rollback()
@@ -289,7 +267,7 @@ class SQLSensor(ManualTriggerSensorEntity):
            return

        for res in result.mappings():
            _LOGGER.debug("Query %s result in %s", rendered_query, res.items())
            _LOGGER.debug("Query %s result in %s", self._query, res.items())
            data = res[self._column_name]
            for key, value in res.items():
                self._attr_extra_state_attributes[key] = convert_value(value)
@@ -309,6 +287,6 @@ class SQLSensor(ManualTriggerSensorEntity):
        self._attr_native_value = data

        if data is None:
            _LOGGER.warning("%s returned no results", rendered_query)
            _LOGGER.warning("%s returned no results", self._query)

        sess.close()
@@ -19,13 +19,11 @@ from homeassistant.core import (
)
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.trigger_template_entity import ValueTemplate
from homeassistant.util.json import JsonValueType

from .const import CONF_QUERY, DOMAIN
from .util import (
    async_create_sessionmaker,
    check_and_render_sql_query,
    convert_value,
    generate_lambda_stmt,
    redact_credentials,
@@ -39,9 +37,7 @@ _LOGGER = logging.getLogger(__name__)
SERVICE_QUERY = "query"
SERVICE_QUERY_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_QUERY): vol.All(
            cv.template, ValueTemplate.from_template, validate_sql_select
        ),
        vol.Required(CONF_QUERY): vol.All(cv.string, validate_sql_select),
        vol.Optional(CONF_DB_URL): cv.string,
    }
)
@@ -76,9 +72,8 @@ async def _async_query_service(
    def _execute_and_convert_query() -> list[JsonValueType]:
        """Execute the query and return the results with converted types."""
        sess: Session = sessmaker()
        rendered_query = check_and_render_sql_query(call.hass, query_str)
        try:
            result: Result = sess.execute(generate_lambda_stmt(rendered_query))
            result: Result = sess.execute(generate_lambda_stmt(query_str))
        except SQLAlchemyError as err:
            _LOGGER.debug(
                "Error executing query %s: %s",
@@ -8,7 +8,7 @@
      "db_url_invalid": "Database URL invalid",
      "multiple_queries": "Multiple SQL queries are not supported",
      "query_invalid": "SQL query invalid",
      "query_no_read_only": "SQL query is not a read-only SELECT query or it's of an unknown type"
      "query_no_read_only": "SQL query must be read-only"
    },
    "step": {
      "options": {
@@ -19,9 +19,7 @@ import voluptuous as vol
from homeassistant.components.recorder import SupportedDialect, get_instance
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError, TemplateError
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers.template import Template

from .const import DB_URL_RE, DOMAIN
from .models import SQLData
@@ -46,14 +44,16 @@ def resolve_db_url(hass: HomeAssistant, db_url: str | None) -> str:
    return get_instance(hass).db_url


def validate_sql_select(value: Template) -> Template:
def validate_sql_select(value: str) -> str:
    """Validate that value is a SQL SELECT query."""
    try:
        assert value.hass
        check_and_render_sql_query(value.hass, value)
    except (TemplateError, InvalidSqlQuery) as err:
        raise vol.Invalid(str(err)) from err
    return value
    if len(query := sqlparse.parse(value.lstrip().lstrip(";"))) > 1:
        raise vol.Invalid("Multiple SQL queries are not supported")
    if len(query) == 0 or (query_type := query[0].get_type()) == "UNKNOWN":
        raise vol.Invalid("Invalid SQL query")
    if query_type != "SELECT":
        _LOGGER.debug("The SQL query %s is of type %s", query, query_type)
        raise vol.Invalid("Only SELECT queries allowed")
    return str(query[0])
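The sqlparse checks above can be exercised outside Home Assistant; a minimal sketch with a hypothetical helper name, using only `sqlparse.parse` and `Statement.get_type` as in the code shown:

```python
import sqlparse


def is_single_select(value: str) -> bool:
    """Mirror the checks above: exactly one statement, and its type must be SELECT."""
    statements = sqlparse.parse(value.lstrip().lstrip(";"))
    return len(statements) == 1 and statements[0].get_type() == "SELECT"


print(is_single_select("SELECT 1;"))            # True
print(is_single_select("DELETE FROM states;"))  # False
print(is_single_select("SELECT 1; SELECT 2;"))  # False
```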
async def async_create_sessionmaker(
@@ -113,7 +113,7 @@ async def async_create_sessionmaker(

def validate_query(
    hass: HomeAssistant,
    query_template: str | Template,
    query_str: str,
    uses_recorder_db: bool,
    unique_id: str | None = None,
) -> None:
@@ -121,7 +121,7 @@ def validate_query(

    Args:
        hass: The Home Assistant instance.
        query_template: The SQL query string to be validated.
        query_str: The SQL query string to be validated.
        uses_recorder_db: A boolean indicating if the query is against the recorder database.
        unique_id: The unique ID of the entity, used for creating issue registry keys.

@@ -131,10 +131,6 @@
    """
    if not uses_recorder_db:
        return
    if isinstance(query_template, Template):
        query_str = query_template.async_render()
    else:
        query_str = Template(query_template, hass).async_render()
    redacted_query = redact_credentials(query_str)

    issue_key = unique_id if unique_id else redacted_query
@@ -243,49 +239,3 @@ def convert_value(value: Any) -> Any:
            return f"0x{value.hex()}"
        case _:
            return value


def check_and_render_sql_query(hass: HomeAssistant, query: Template | str) -> str:
    """Check and render SQL query."""
    if isinstance(query, str):
        query = query.strip()
        if not query:
            raise EmptyQueryError("Query cannot be empty")
        query = Template(query, hass=hass)

    # Raises TemplateError if template is invalid
    query.ensure_valid()
    rendered_query: str = query.async_render()

    if len(rendered_queries := sqlparse.parse(rendered_query.lstrip().lstrip(";"))) > 1:
        raise MultipleQueryError("Multiple SQL statements are not allowed")
    if (
        len(rendered_queries) == 0
        or (query_type := rendered_queries[0].get_type()) == "UNKNOWN"
    ):
        raise UnknownQueryTypeError("SQL query is empty or unknown type")
    if query_type != "SELECT":
        _LOGGER.debug("The SQL query %s is of type %s", rendered_query, query_type)
        raise NotSelectQueryError("SQL query must be of type SELECT")

    return str(rendered_queries[0])


class InvalidSqlQuery(HomeAssistantError):
    """SQL query is invalid error."""


class EmptyQueryError(InvalidSqlQuery):
    """SQL query is empty error."""


class MultipleQueryError(InvalidSqlQuery):
    """SQL query is multiple error."""


class UnknownQueryTypeError(InvalidSqlQuery):
    """SQL query is of unknown type error."""


class NotSelectQueryError(InvalidSqlQuery):
    """SQL query is not a SELECT statement error."""
@@ -66,11 +66,11 @@ def get_process(entity: SystemMonitorSensor) -> bool:
class SysMonitorBinarySensorEntityDescription(BinarySensorEntityDescription):
    """Describes System Monitor binary sensor entities."""

    value_fn: Callable[[SystemMonitorSensor], bool | None]
    value_fn: Callable[[SystemMonitorSensor], bool]
    add_to_update: Callable[[SystemMonitorSensor], tuple[str, str]]


PROCESS_TYPES: tuple[SysMonitorBinarySensorEntityDescription, ...] = (
SENSOR_TYPES: tuple[SysMonitorBinarySensorEntityDescription, ...] = (
    SysMonitorBinarySensorEntityDescription(
        key="binary_process",
        translation_key="process",
@@ -81,20 +81,6 @@ PROCESS_TYPES: tuple[SysMonitorBinarySensorEntityDescription, ...] = (
    ),
)

BINARY_SENSOR_TYPES: tuple[SysMonitorBinarySensorEntityDescription, ...] = (
    SysMonitorBinarySensorEntityDescription(
        key="battery_plugged",
        value_fn=(
            lambda entity: entity.coordinator.data.battery.power_plugged
            if entity.coordinator.data.battery
            else None
        ),
        device_class=BinarySensorDeviceClass.BATTERY_CHARGING,
        add_to_update=lambda entity: ("battery", ""),
        entity_registry_enabled_default=False,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
@@ -104,30 +90,18 @@ async def async_setup_entry(
    """Set up System Monitor binary sensors based on a config entry."""
    coordinator = entry.runtime_data.coordinator

    entities: list[SystemMonitorSensor] = []

    entities.extend(
    async_add_entities(
        SystemMonitorSensor(
            coordinator,
            sensor_description,
            entry.entry_id,
            argument,
        )
        for sensor_description in PROCESS_TYPES
        for sensor_description in SENSOR_TYPES
        for argument in entry.options.get(BINARY_SENSOR_DOMAIN, {}).get(
            CONF_PROCESS, []
        )
    )
    entities.extend(
        SystemMonitorSensor(
            coordinator,
            sensor_description,
            entry.entry_id,
            "",
        )
        for sensor_description in BINARY_SENSOR_TYPES
    )
    async_add_entities(entities)


class SystemMonitorSensor(
@@ -9,7 +9,7 @@ import os
from typing import TYPE_CHECKING, Any, NamedTuple

from psutil import Process
from psutil._common import sbattery, sdiskusage, shwtemp, snetio, snicaddr, sswap
from psutil._common import sdiskusage, shwtemp, snetio, snicaddr, sswap
import psutil_home_assistant as ha_psutil

from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
@@ -22,7 +22,6 @@ from .const import CONF_PROCESS, PROCESS_ERRORS

if TYPE_CHECKING:
    from . import SystemMonitorConfigEntry
from .util import read_fan_speed

_LOGGER = logging.getLogger(__name__)
@@ -32,11 +31,9 @@ class SensorData:
    """Sensor data."""

    addresses: dict[str, list[snicaddr]]
    battery: sbattery | None
    boot_time: datetime
    cpu_percent: float | None
    disk_usage: dict[str, sdiskusage]
    fan_speed: dict[str, int]
    io_counters: dict[str, snetio]
    load: tuple[float, float, float]
    memory: VirtualMemory
@@ -53,23 +50,17 @@ class SensorData:
        disk_usage = None
        if self.disk_usage:
            disk_usage = {k: str(v) for k, v in self.disk_usage.items()}
        fan_speed = None
        if self.fan_speed:
            fan_speed = {k: str(v) for k, v in self.fan_speed.items()}
        io_counters = None
        if self.io_counters:
            io_counters = {k: str(v) for k, v in self.io_counters.items()}
        temperatures = None
        if self.temperatures:
            temperatures = {k: str(v) for k, v in self.temperatures.items()}

        return {
            "addresses": addresses,
            "battery": str(self.battery),
            "boot_time": str(self.boot_time),
            "cpu_percent": str(self.cpu_percent),
            "disk_usage": disk_usage,
            "fan_speed": fan_speed,
            "io_counters": io_counters,
            "load": str(self.load),
            "memory": str(self.memory),
@@ -134,10 +125,8 @@ class SystemMonitorCoordinator(TimestampDataUpdateCoordinator[SensorData]):
        return {
            **_disk_defaults,
            ("addresses", ""): set(),
            ("battery", ""): set(),
            ("boot", ""): set(),
            ("cpu_percent", ""): set(),
            ("fan_speed", ""): set(),
            ("io_counters", ""): set(),
            ("load", ""): set(),
            ("memory", ""): set(),
@@ -165,11 +154,9 @@ class SystemMonitorCoordinator(TimestampDataUpdateCoordinator[SensorData]):
        self._initial_update = False
        return SensorData(
            addresses=_data["addresses"],
            battery=_data["battery"],
            boot_time=_data["boot_time"],
            cpu_percent=cpu_percent,
            disk_usage=_data["disks"],
            fan_speed=_data["fan_speed"],
            io_counters=_data["io_counters"],
            load=load,
            memory=_data["memory"],
@@ -268,29 +255,10 @@ class SystemMonitorCoordinator(TimestampDataUpdateCoordinator[SensorData]):
            except AttributeError:
                _LOGGER.debug("OS does not provide temperature sensors")

        fan_speed: dict[str, int] = {}
        if self.update_subscribers[("fan_speed", "")] or self._initial_update:
            try:
                fan_sensors = self._psutil.sensors_fans()
                fan_speed = read_fan_speed(fan_sensors)
                _LOGGER.debug("fan_speed: %s", fan_speed)
            except AttributeError:
                _LOGGER.debug("OS does not provide fan sensors")

        battery: sbattery | None = None
        if self.update_subscribers[("battery", "")] or self._initial_update:
            try:
                battery = self._psutil.sensors_battery()
                _LOGGER.debug("battery: %s", battery)
            except AttributeError:
                _LOGGER.debug("OS does not provide battery sensors")

        return {
            "addresses": addresses,
            "battery": battery,
            "boot_time": self.boot_time,
            "disks": disks,
            "fan_speed": fan_speed,
            "io_counters": io_counters,
            "memory": memory,
            "process_fds": process_fds,
@@ -1,9 +1,6 @@
{
  "entity": {
    "sensor": {
      "battery_empty": {
        "default": "mdi:battery-clock"
      },
      "disk_free": {
        "default": "mdi:harddisk"
      },
@@ -13,9 +10,6 @@
      "disk_use_percent": {
        "default": "mdi:harddisk"
      },
      "fan_speed": {
        "default": "mdi:fan"
      },
      "ipv4_address": {
        "default": "mdi:ip-network"
      },
@@ -5,7 +5,7 @@ from __future__ import annotations
from collections.abc import Callable
import contextlib
from dataclasses import dataclass
from datetime import datetime, timedelta
from datetime import datetime
from functools import lru_cache
import ipaddress
import logging
@@ -14,8 +14,6 @@ import sys
import time
from typing import Any, Literal

from psutil._common import POWER_TIME_UNKNOWN, POWER_TIME_UNLIMITED

from homeassistant.components.sensor import (
    DOMAIN as SENSOR_DOMAIN,
    SensorDeviceClass,
@@ -25,7 +23,6 @@ from homeassistant.components.sensor import (
)
from homeassistant.const import (
    PERCENTAGE,
    REVOLUTIONS_PER_MINUTE,
    EntityCategory,
    UnitOfDataRate,
    UnitOfInformation,
@@ -37,7 +34,7 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util import dt as dt_util, slugify
from homeassistant.util import slugify

from . import SystemMonitorConfigEntry
from .binary_sensor import BINARY_SENSOR_DOMAIN
@@ -58,11 +55,7 @@ SENSOR_TYPE_MANDATORY_ARG = 4

SIGNAL_SYSTEMMONITOR_UPDATE = "systemmonitor_update"

BATTERY_REMAIN_UNKNOWNS = (POWER_TIME_UNKNOWN, POWER_TIME_UNLIMITED)

SENSORS_NO_ARG = (
    "battery_empty",
    "battery",
    "last_boot",
    "load_",
    "memory_",
@@ -71,7 +64,6 @@ SENSORS_NO_ARG = (
)
SENSORS_WITH_ARG = {
    "disk_": "disk_arguments",
    "fan_speed": "fan_speed_arguments",
    "ipv": "network_arguments",
    "process_num_fds": "processes",
    **dict.fromkeys(NET_IO_TYPES, "network_arguments"),
@@ -147,17 +139,6 @@ def get_process_num_fds(entity: SystemMonitorSensor) -> int | None:
    return process_fds.get(entity.argument)


def battery_time_ends(entity: SystemMonitorSensor) -> datetime | None:
    """Return when battery runs out, rounded to minute."""
    battery = entity.coordinator.data.battery
    if not battery or battery.secsleft in BATTERY_REMAIN_UNKNOWNS:
        return None

    return (dt_util.utcnow() + timedelta(seconds=battery.secsleft)).replace(
        second=0, microsecond=0
    )

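A standalone sketch of the same battery-runout calculation using psutil directly, without the coordinator (prints None on machines with no battery or an unknown/unlimited estimate):

```python
from datetime import datetime, timedelta, timezone

import psutil
from psutil._common import POWER_TIME_UNKNOWN, POWER_TIME_UNLIMITED


def battery_runs_out() -> datetime | None:
    """Now + secsleft, rounded down to the minute, as in battery_time_ends above."""
    battery = psutil.sensors_battery()  # None when no battery is present
    if not battery or battery.secsleft in (POWER_TIME_UNKNOWN, POWER_TIME_UNLIMITED):
        return None
    return (datetime.now(timezone.utc) + timedelta(seconds=battery.secsleft)).replace(
        second=0, microsecond=0
    )


print(battery_runs_out())
```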
@dataclass(frozen=True, kw_only=True)
class SysMonitorSensorEntityDescription(SensorEntityDescription):
    """Describes System Monitor sensor entities."""
@@ -170,28 +151,6 @@ class SysMonitorSensorEntityDescription(SensorEntityDescription):


SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = {
    "battery": SysMonitorSensorEntityDescription(
        key="battery",
        native_unit_of_measurement=PERCENTAGE,
        device_class=SensorDeviceClass.BATTERY,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=(
            lambda entity: entity.coordinator.data.battery.percent
            if entity.coordinator.data.battery
            else None
        ),
        none_is_unavailable=True,
        add_to_update=lambda entity: ("battery", ""),
    ),
    "battery_empty": SysMonitorSensorEntityDescription(
        key="battery_empty",
        translation_key="battery_empty",
        device_class=SensorDeviceClass.TIMESTAMP,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=battery_time_ends,
        none_is_unavailable=True,
        add_to_update=lambda entity: ("battery", ""),
    ),
    "disk_free": SysMonitorSensorEntityDescription(
        key="disk_free",
        translation_key="disk_free",
@@ -240,16 +199,6 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = {
        none_is_unavailable=True,
        add_to_update=lambda entity: ("disks", entity.argument),
    ),
    "fan_speed": SysMonitorSensorEntityDescription(
        key="fan_speed",
        translation_key="fan_speed",
        placeholder="fan_name",
        native_unit_of_measurement=REVOLUTIONS_PER_MINUTE,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda entity: entity.coordinator.data.fan_speed[entity.argument],
        none_is_unavailable=True,
        add_to_update=lambda entity: ("fan_speed", ""),
    ),
    "ipv4_address": SysMonitorSensorEntityDescription(
        key="ipv4_address",
        translation_key="ipv4_address",
@@ -303,8 +252,8 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = {
        native_unit_of_measurement=UnitOfInformation.MEBIBYTES,
        device_class=SensorDeviceClass.DATA_SIZE,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=(
            lambda entity: round(entity.coordinator.data.memory.available / 1024**2, 1)
        value_fn=lambda entity: round(
            entity.coordinator.data.memory.available / 1024**2, 1
        ),
        add_to_update=lambda entity: ("memory", ""),
    ),
@@ -505,7 +454,6 @@ async def async_setup_entry(
        return {
            "disk_arguments": get_all_disk_mounts(hass, psutil_wrapper),
            "network_arguments": get_all_network_interfaces(hass, psutil_wrapper),
            "fan_speed_arguments": list(sensor_data.fan_speed),
        }

    cpu_temperature: float | None = None
@@ -16,9 +16,6 @@
      }
    },
    "sensor": {
      "battery_empty": {
        "name": "Battery empty"
      },
      "disk_free": {
        "name": "Disk free {mount_point}"
      },
@@ -28,9 +25,6 @@
      "disk_use_percent": {
        "name": "Disk usage {mount_point}"
      },
      "fan_speed": {
        "name": "{fan_name} fan speed"
      },
      "ipv4_address": {
        "name": "IPv4 address {ip_address}"
      },
@@ -3,7 +3,7 @@
import logging
import os

from psutil._common import sfan, shwtemp
from psutil._common import shwtemp
import psutil_home_assistant as ha_psutil

from homeassistant.core import HomeAssistant
@@ -89,19 +89,3 @@ def read_cpu_temperature(temps: dict[str, list[shwtemp]]) -> float | None:
            return round(entry.current, 1)

    return None


def read_fan_speed(fans: dict[str, list[sfan]]) -> dict[str, int]:
    """Attempt to read fan speed."""
    entry: sfan

    _LOGGER.debug("Fan speed: %s", fans)
    if not fans:
        return {}
    sensor_fans: dict[str, int] = {}
    for name, entries in fans.items():
        for entry in entries:
            _label = name if not entry.label else entry.label
            sensor_fans[_label] = round(entry.current, 0)

    return sensor_fans
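A standalone sketch of the fan-speed reading above against psutil's `sfan` tuples; the sample input is hypothetical:

```python
from psutil._common import sfan  # namedtuple("sfan", ["label", "current"])


def read_fan_speed(fans: dict[str, list[sfan]]) -> dict[str, int]:
    """Same behaviour as the helper above: prefer the entry label, fall back to the device name."""
    speeds: dict[str, int] = {}
    for name, entries in fans.items():
        for entry in entries:
            speeds[entry.label or name] = round(entry.current)
    return speeds


print(read_fan_speed({"dell_smm": [sfan(label="", current=2200.0)]}))  # {'dell_smm': 2200}
```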
@@ -21,7 +21,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import TuyaConfigEntry
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode, DPType
from .entity import TuyaEntity
from .models import EnumTypeData, find_dpcode
from .models import EnumTypeData
from .util import get_dpcode


@@ -118,8 +118,8 @@ class TuyaAlarmEntity(TuyaEntity, AlarmControlPanelEntity):
        self._attr_unique_id = f"{super().unique_id}{description.key}"

        # Determine supported modes
        if supported_modes := find_dpcode(
            self.device, description.key, dptype=DPType.ENUM, prefer_function=True
        if supported_modes := self.find_dpcode(
            description.key, dptype=DPType.ENUM, prefer_function=True
        ):
            if Mode.HOME in supported_modes.range:
                self._attr_supported_features |= AlarmControlPanelEntityFeature.ARM_HOME
@@ -131,11 +131,8 @@ class TuyaAlarmEntity(TuyaEntity, AlarmControlPanelEntity):
            self._attr_supported_features |= AlarmControlPanelEntityFeature.TRIGGER

        # Determine master state
        if enum_type := find_dpcode(
            self.device,
            description.master_state,
            dptype=DPType.ENUM,
            prefer_function=True,
        if enum_type := self.find_dpcode(
            description.master_state, dptype=DPType.ENUM, prefer_function=True
        ):
            self._master_state = enum_type
@@ -26,7 +26,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import TuyaConfigEntry
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode, DPType
from .entity import TuyaEntity
from .models import IntegerTypeData, find_dpcode
from .models import IntegerTypeData
from .util import get_dpcode

TUYA_HVAC_TO_HA = {
@@ -153,13 +153,11 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
            self._attr_temperature_unit = system_temperature_unit

        # Figure out current temperature, use preferred unit or what is available
        celsius_type = find_dpcode(
            self.device, (DPCode.TEMP_CURRENT, DPCode.UPPER_TEMP), dptype=DPType.INTEGER
        celsius_type = self.find_dpcode(
            (DPCode.TEMP_CURRENT, DPCode.UPPER_TEMP), dptype=DPType.INTEGER
        )
        fahrenheit_type = find_dpcode(
            self.device,
            (DPCode.TEMP_CURRENT_F, DPCode.UPPER_TEMP_F),
            dptype=DPType.INTEGER,
        fahrenheit_type = self.find_dpcode(
            (DPCode.TEMP_CURRENT_F, DPCode.UPPER_TEMP_F), dptype=DPType.INTEGER
        )
        if fahrenheit_type and (
            prefered_temperature_unit == UnitOfTemperature.FAHRENHEIT
@@ -175,11 +173,11 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
            self._current_temperature = celsius_type

        # Figure out setting temperature, use preferred unit or what is available
        celsius_type = find_dpcode(
            self.device, DPCode.TEMP_SET, dptype=DPType.INTEGER, prefer_function=True
        celsius_type = self.find_dpcode(
            DPCode.TEMP_SET, dptype=DPType.INTEGER, prefer_function=True
        )
        fahrenheit_type = find_dpcode(
            self.device, DPCode.TEMP_SET_F, dptype=DPType.INTEGER, prefer_function=True
        fahrenheit_type = self.find_dpcode(
            DPCode.TEMP_SET_F, dptype=DPType.INTEGER, prefer_function=True
        )
        if fahrenheit_type and (
            prefered_temperature_unit == UnitOfTemperature.FAHRENHEIT
@@ -203,8 +201,8 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
        # Determine HVAC modes
        self._attr_hvac_modes: list[HVACMode] = []
        self._hvac_to_tuya = {}
        if enum_type := find_dpcode(
            self.device, DPCode.MODE, dptype=DPType.ENUM, prefer_function=True
        if enum_type := self.find_dpcode(
            DPCode.MODE, dptype=DPType.ENUM, prefer_function=True
        ):
            self._attr_hvac_modes = [HVACMode.OFF]
            unknown_hvac_modes: list[str] = []
@@ -227,11 +225,8 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
|
||||
]
|
||||
|
||||
# Determine dpcode to use for setting the humidity
|
||||
if int_type := find_dpcode(
|
||||
self.device,
|
||||
DPCode.HUMIDITY_SET,
|
||||
dptype=DPType.INTEGER,
|
||||
prefer_function=True,
|
||||
if int_type := self.find_dpcode(
|
||||
DPCode.HUMIDITY_SET, dptype=DPType.INTEGER, prefer_function=True
|
||||
):
|
||||
self._attr_supported_features |= ClimateEntityFeature.TARGET_HUMIDITY
|
||||
self._set_humidity = int_type
|
||||
@@ -239,14 +234,13 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
|
||||
self._attr_max_humidity = int(int_type.max_scaled)
|
||||
|
||||
# Determine dpcode to use for getting the current humidity
|
||||
self._current_humidity = find_dpcode(
|
||||
self.device, DPCode.HUMIDITY_CURRENT, dptype=DPType.INTEGER
|
||||
self._current_humidity = self.find_dpcode(
|
||||
DPCode.HUMIDITY_CURRENT, dptype=DPType.INTEGER
|
||||
)
|
||||
|
||||
# Determine fan modes
|
||||
self._fan_mode_dp_code: str | None = None
|
||||
if enum_type := find_dpcode(
|
||||
self.device,
|
||||
if enum_type := self.find_dpcode(
|
||||
(DPCode.FAN_SPEED_ENUM, DPCode.LEVEL, DPCode.WINDSPEED),
|
||||
dptype=DPType.ENUM,
|
||||
prefer_function=True,
|
||||
|
||||
@@ -22,7 +22,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from . import TuyaConfigEntry
|
||||
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode, DPType
|
||||
from .entity import TuyaEntity
|
||||
from .models import EnumTypeData, IntegerTypeData, find_dpcode
|
||||
from .models import EnumTypeData, IntegerTypeData
|
||||
from .util import get_dpcode
|
||||
|
||||
|
||||
@@ -204,8 +204,8 @@ class TuyaCoverEntity(TuyaEntity, CoverEntity):
|
||||
self._attr_supported_features |= (
|
||||
CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
|
||||
)
|
||||
elif enum_type := find_dpcode(
|
||||
self.device, description.key, dptype=DPType.ENUM, prefer_function=True
|
||||
elif enum_type := self.find_dpcode(
|
||||
description.key, dptype=DPType.ENUM, prefer_function=True
|
||||
):
|
||||
if description.open_instruction_value in enum_type.range:
|
||||
self._attr_supported_features |= CoverEntityFeature.OPEN
|
||||
@@ -217,11 +217,8 @@ class TuyaCoverEntity(TuyaEntity, CoverEntity):
|
||||
self._current_state = get_dpcode(self.device, description.current_state)
|
||||
|
||||
# Determine type to use for setting the position
|
||||
if int_type := find_dpcode(
|
||||
self.device,
|
||||
description.set_position,
|
||||
dptype=DPType.INTEGER,
|
||||
prefer_function=True,
|
||||
if int_type := self.find_dpcode(
|
||||
description.set_position, dptype=DPType.INTEGER, prefer_function=True
|
||||
):
|
||||
self._attr_supported_features |= CoverEntityFeature.SET_POSITION
|
||||
self._set_position = int_type
|
||||
@@ -229,17 +226,13 @@ class TuyaCoverEntity(TuyaEntity, CoverEntity):
|
||||
self._current_position = int_type
|
||||
|
||||
# Determine type for getting the position
|
||||
if int_type := find_dpcode(
|
||||
self.device,
|
||||
description.current_position,
|
||||
dptype=DPType.INTEGER,
|
||||
prefer_function=True,
|
||||
if int_type := self.find_dpcode(
|
||||
description.current_position, dptype=DPType.INTEGER, prefer_function=True
|
||||
):
|
||||
self._current_position = int_type
|
||||
|
||||
# Determine type to use for setting the tilt
|
||||
if int_type := find_dpcode(
|
||||
self.device,
|
||||
if int_type := self.find_dpcode(
|
||||
(DPCode.ANGLE_HORIZONTAL, DPCode.ANGLE_VERTICAL),
|
||||
dptype=DPType.INTEGER,
|
||||
prefer_function=True,
|
||||
@@ -249,8 +242,7 @@ class TuyaCoverEntity(TuyaEntity, CoverEntity):
|
||||
|
||||
# Determine type to use for checking motor reverse mode
|
||||
if (motor_mode := description.motor_reverse_mode) and (
|
||||
enum_type := find_dpcode(
|
||||
self.device,
|
||||
enum_type := self.find_dpcode(
|
||||
motor_mode,
|
||||
dptype=DPType.ENUM,
|
||||
prefer_function=True,
|
||||
@@ -319,11 +311,8 @@ class TuyaCoverEntity(TuyaEntity, CoverEntity):
|
||||
def open_cover(self, **kwargs: Any) -> None:
|
||||
"""Open the cover."""
|
||||
value: bool | str = True
|
||||
if find_dpcode(
|
||||
self.device,
|
||||
self.entity_description.key,
|
||||
dptype=DPType.ENUM,
|
||||
prefer_function=True,
|
||||
if self.find_dpcode(
|
||||
self.entity_description.key, dptype=DPType.ENUM, prefer_function=True
|
||||
):
|
||||
value = self.entity_description.open_instruction_value
|
||||
|
||||
@@ -348,11 +337,8 @@ class TuyaCoverEntity(TuyaEntity, CoverEntity):
|
||||
def close_cover(self, **kwargs: Any) -> None:
|
||||
"""Close cover."""
|
||||
value: bool | str = False
|
||||
if find_dpcode(
|
||||
self.device,
|
||||
self.entity_description.key,
|
||||
dptype=DPType.ENUM,
|
||||
prefer_function=True,
|
||||
if self.find_dpcode(
|
||||
self.entity_description.key, dptype=DPType.ENUM, prefer_function=True
|
||||
):
|
||||
value = self.entity_description.close_instruction_value
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@

from __future__ import annotations

from typing import Any
from typing import Any, Literal, overload

from tuya_sharing import CustomerDevice, Manager

@@ -10,7 +10,8 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity

from .const import DOMAIN, LOGGER, TUYA_HA_SIGNAL_UPDATE_ENTITY
from .const import DOMAIN, LOGGER, TUYA_HA_SIGNAL_UPDATE_ENTITY, DPCode, DPType
from .models import EnumTypeData, IntegerTypeData


class TuyaEntity(Entity):
@@ -43,6 +44,77 @@ class TuyaEntity(Entity):
"""Return if the device is available."""
return self.device.online

@overload
def find_dpcode(
self,
dpcodes: str | DPCode | tuple[DPCode, ...] | None,
*,
prefer_function: bool = False,
dptype: Literal[DPType.ENUM],
) -> EnumTypeData | None: ...

@overload
def find_dpcode(
self,
dpcodes: str | DPCode | tuple[DPCode, ...] | None,
*,
prefer_function: bool = False,
dptype: Literal[DPType.INTEGER],
) -> IntegerTypeData | None: ...

def find_dpcode(
self,
dpcodes: str | DPCode | tuple[DPCode, ...] | None,
*,
prefer_function: bool = False,
dptype: DPType,
) -> EnumTypeData | IntegerTypeData | None:
"""Find type information for a matching DP code available for this device."""
if dptype not in (DPType.ENUM, DPType.INTEGER):
raise NotImplementedError("Only ENUM and INTEGER types are supported")

if dpcodes is None:
return None

if isinstance(dpcodes, str):
dpcodes = (DPCode(dpcodes),)
elif not isinstance(dpcodes, tuple):
dpcodes = (dpcodes,)

order = ["status_range", "function"]
if prefer_function:
order = ["function", "status_range"]

for dpcode in dpcodes:
for key in order:
if dpcode not in getattr(self.device, key):
continue
if (
dptype == DPType.ENUM
and getattr(self.device, key)[dpcode].type == DPType.ENUM
):
if not (
enum_type := EnumTypeData.from_json(
dpcode, getattr(self.device, key)[dpcode].values
)
):
continue
return enum_type

if (
dptype == DPType.INTEGER
and getattr(self.device, key)[dpcode].type == DPType.INTEGER
):
if not (
integer_type := IntegerTypeData.from_json(
dpcode, getattr(self.device, key)[dpcode].values
)
):
continue
return integer_type

return None

async def async_added_to_hass(self) -> None:
"""Call when entity is added to hass."""
self.async_on_remove(
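
Note on the method above: the Literal overloads let callers get back a properly narrowed EnumTypeData or IntegerTypeData without casts. A minimal sketch of the calling pattern used throughout the platform modules in this diff; the description object and the chosen DP codes here are illustrative placeholders, not taken from any specific file:

# Sketch only: how a platform entity built on TuyaEntity would call find_dpcode.
# `description` and the selected DP codes are placeholders for illustration.
if enum_type := self.find_dpcode(
    description.key, dptype=DPType.ENUM, prefer_function=True
):
    # The Literal[DPType.ENUM] overload narrows this to EnumTypeData | None.
    self._attr_options = enum_type.range
if int_type := self.find_dpcode(
    (DPCode.TEMP_SET, DPCode.TEMP_SET_F), dptype=DPType.INTEGER, prefer_function=True
):
    # The Literal[DPType.INTEGER] overload narrows this to IntegerTypeData | None.
    self._attr_native_min_value = int_type.min_scaled
    self._attr_native_max_value = int_type.max_scaled
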
@@ -16,7 +16,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import TuyaConfigEntry
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode, DPType
from .entity import TuyaEntity
from .models import find_dpcode

# All descriptions can be found here. Mostly the Enum data types in the
# default status set of each category (that don't have a set instruction)
@@ -126,7 +125,7 @@ class TuyaEventEntity(TuyaEntity, EventEntity):
self.entity_description = description
self._attr_unique_id = f"{super().unique_id}{description.key}"

if dpcode := find_dpcode(self.device, description.key, dptype=DPType.ENUM):
if dpcode := self.find_dpcode(description.key, dptype=DPType.ENUM):
self._attr_event_types: list[str] = dpcode.range

async def _handle_state_update(

@@ -23,7 +23,7 @@ from homeassistant.util.percentage import (
|
||||
from . import TuyaConfigEntry
|
||||
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode, DPType
|
||||
from .entity import TuyaEntity
|
||||
from .models import EnumTypeData, IntegerTypeData, find_dpcode
|
||||
from .models import EnumTypeData, IntegerTypeData
|
||||
from .util import get_dpcode
|
||||
|
||||
_DIRECTION_DPCODES = (DPCode.FAN_DIRECTION,)
|
||||
@@ -106,24 +106,21 @@ class TuyaFanEntity(TuyaEntity, FanEntity):
|
||||
self._switch = get_dpcode(self.device, _SWITCH_DPCODES)
|
||||
|
||||
self._attr_preset_modes = []
|
||||
if enum_type := find_dpcode(
|
||||
self.device,
|
||||
(DPCode.FAN_MODE, DPCode.MODE),
|
||||
dptype=DPType.ENUM,
|
||||
prefer_function=True,
|
||||
if enum_type := self.find_dpcode(
|
||||
(DPCode.FAN_MODE, DPCode.MODE), dptype=DPType.ENUM, prefer_function=True
|
||||
):
|
||||
self._presets = enum_type
|
||||
self._attr_supported_features |= FanEntityFeature.PRESET_MODE
|
||||
self._attr_preset_modes = enum_type.range
|
||||
|
||||
# Find speed controls, can be either percentage or a set of speeds
|
||||
if int_type := find_dpcode(
|
||||
self.device, _SPEED_DPCODES, dptype=DPType.INTEGER, prefer_function=True
|
||||
if int_type := self.find_dpcode(
|
||||
_SPEED_DPCODES, dptype=DPType.INTEGER, prefer_function=True
|
||||
):
|
||||
self._attr_supported_features |= FanEntityFeature.SET_SPEED
|
||||
self._speed = int_type
|
||||
elif enum_type := find_dpcode(
|
||||
self.device, _SPEED_DPCODES, dptype=DPType.ENUM, prefer_function=True
|
||||
elif enum_type := self.find_dpcode(
|
||||
_SPEED_DPCODES, dptype=DPType.ENUM, prefer_function=True
|
||||
):
|
||||
self._attr_supported_features |= FanEntityFeature.SET_SPEED
|
||||
self._speeds = enum_type
|
||||
@@ -132,8 +129,8 @@ class TuyaFanEntity(TuyaEntity, FanEntity):
|
||||
self._oscillate = dpcode
|
||||
self._attr_supported_features |= FanEntityFeature.OSCILLATE
|
||||
|
||||
if enum_type := find_dpcode(
|
||||
self.device, _DIRECTION_DPCODES, dptype=DPType.ENUM, prefer_function=True
|
||||
if enum_type := self.find_dpcode(
|
||||
_DIRECTION_DPCODES, dptype=DPType.ENUM, prefer_function=True
|
||||
):
|
||||
self._direction = enum_type
|
||||
self._attr_supported_features |= FanEntityFeature.DIRECTION
|
||||
|
||||
@@ -20,7 +20,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from . import TuyaConfigEntry
|
||||
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode, DPType
|
||||
from .entity import TuyaEntity
|
||||
from .models import IntegerTypeData, find_dpcode
|
||||
from .models import IntegerTypeData
|
||||
from .util import ActionDPCodeNotFoundError, get_dpcode
|
||||
|
||||
|
||||
@@ -120,27 +120,23 @@ class TuyaHumidifierEntity(TuyaEntity, HumidifierEntity):
|
||||
)
|
||||
|
||||
# Determine humidity parameters
|
||||
if int_type := find_dpcode(
|
||||
self.device,
|
||||
description.humidity,
|
||||
dptype=DPType.INTEGER,
|
||||
prefer_function=True,
|
||||
if int_type := self.find_dpcode(
|
||||
description.humidity, dptype=DPType.INTEGER, prefer_function=True
|
||||
):
|
||||
self._set_humidity = int_type
|
||||
self._attr_min_humidity = int(int_type.min_scaled)
|
||||
self._attr_max_humidity = int(int_type.max_scaled)
|
||||
|
||||
# Determine current humidity DPCode
|
||||
if int_type := find_dpcode(
|
||||
self.device,
|
||||
if int_type := self.find_dpcode(
|
||||
description.current_humidity,
|
||||
dptype=DPType.INTEGER,
|
||||
):
|
||||
self._current_humidity = int_type
|
||||
|
||||
# Determine mode support and provided modes
|
||||
if enum_type := find_dpcode(
|
||||
self.device, DPCode.MODE, dptype=DPType.ENUM, prefer_function=True
|
||||
if enum_type := self.find_dpcode(
|
||||
DPCode.MODE, dptype=DPType.ENUM, prefer_function=True
|
||||
):
|
||||
self._attr_supported_features |= HumidifierEntityFeature.MODES
|
||||
self._attr_available_modes = enum_type.range
|
||||
|
||||
@@ -28,7 +28,7 @@ from homeassistant.util import color as color_util
|
||||
from . import TuyaConfigEntry
|
||||
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode, DPType, WorkMode
|
||||
from .entity import TuyaEntity
|
||||
from .models import IntegerTypeData, find_dpcode
|
||||
from .models import IntegerTypeData
|
||||
from .util import get_dpcode, get_dptype, remap_value
|
||||
|
||||
|
||||
@@ -466,19 +466,16 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
|
||||
# Determine DPCodes
|
||||
self._color_mode_dpcode = get_dpcode(self.device, description.color_mode)
|
||||
|
||||
if int_type := find_dpcode(
|
||||
self.device,
|
||||
description.brightness,
|
||||
dptype=DPType.INTEGER,
|
||||
prefer_function=True,
|
||||
if int_type := self.find_dpcode(
|
||||
description.brightness, dptype=DPType.INTEGER, prefer_function=True
|
||||
):
|
||||
self._brightness = int_type
|
||||
color_modes.add(ColorMode.BRIGHTNESS)
|
||||
self._brightness_max = find_dpcode(
|
||||
self.device, description.brightness_max, dptype=DPType.INTEGER
|
||||
self._brightness_max = self.find_dpcode(
|
||||
description.brightness_max, dptype=DPType.INTEGER
|
||||
)
|
||||
self._brightness_min = find_dpcode(
|
||||
self.device, description.brightness_min, dptype=DPType.INTEGER
|
||||
self._brightness_min = self.find_dpcode(
|
||||
description.brightness_min, dptype=DPType.INTEGER
|
||||
)
|
||||
|
||||
if (dpcode := get_dpcode(self.device, description.color_data)) and (
|
||||
@@ -507,11 +504,8 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
|
||||
self._color_data_type = DEFAULT_COLOR_TYPE_DATA_V2
|
||||
|
||||
# Check if the light has color temperature
|
||||
if int_type := find_dpcode(
|
||||
self.device,
|
||||
description.color_temp,
|
||||
dptype=DPType.INTEGER,
|
||||
prefer_function=True,
|
||||
if int_type := self.find_dpcode(
|
||||
description.color_temp, dptype=DPType.INTEGER, prefer_function=True
|
||||
):
|
||||
self._color_temp = int_type
|
||||
color_modes.add(ColorMode.COLOR_TEMP)
|
||||
@@ -520,11 +514,8 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
|
||||
elif (
|
||||
color_supported(color_modes)
|
||||
and (
|
||||
color_mode_enum := find_dpcode(
|
||||
self.device,
|
||||
description.color_mode,
|
||||
dptype=DPType.ENUM,
|
||||
prefer_function=True,
|
||||
color_mode_enum := self.find_dpcode(
|
||||
description.color_mode, dptype=DPType.ENUM, prefer_function=True
|
||||
)
|
||||
)
|
||||
and WorkMode.WHITE.value in color_mode_enum.range
|
||||
|
||||
@@ -6,86 +6,12 @@ import base64
from dataclasses import dataclass
import json
import struct
from typing import Literal, Self, overload
from typing import Self

from tuya_sharing import CustomerDevice

from .const import DPCode, DPType
from .const import DPCode
from .util import remap_value


@overload
def find_dpcode(
device: CustomerDevice,
dpcodes: str | DPCode | tuple[DPCode, ...] | None,
*,
prefer_function: bool = False,
dptype: Literal[DPType.ENUM],
) -> EnumTypeData | None: ...


@overload
def find_dpcode(
device: CustomerDevice,
dpcodes: str | DPCode | tuple[DPCode, ...] | None,
*,
prefer_function: bool = False,
dptype: Literal[DPType.INTEGER],
) -> IntegerTypeData | None: ...


def find_dpcode(
device: CustomerDevice,
dpcodes: str | DPCode | tuple[DPCode, ...] | None,
*,
prefer_function: bool = False,
dptype: DPType,
) -> EnumTypeData | IntegerTypeData | None:
"""Find type information for a matching DP code available for this device."""
if dptype not in (DPType.ENUM, DPType.INTEGER):
raise NotImplementedError("Only ENUM and INTEGER types are supported")

if dpcodes is None:
return None

if isinstance(dpcodes, str):
dpcodes = (DPCode(dpcodes),)
elif not isinstance(dpcodes, tuple):
dpcodes = (dpcodes,)

lookup_tuple = (
(device.function, device.status_range)
if prefer_function
else (device.status_range, device.function)
)

for dpcode in dpcodes:
for device_specs in lookup_tuple:
if not (
(current_definition := device_specs.get(dpcode))
and current_definition.type == dptype
):
continue
if dptype is DPType.ENUM:
if not (
enum_type := EnumTypeData.from_json(
dpcode, current_definition.values
)
):
continue
return enum_type
if dptype is DPType.INTEGER:
if not (
integer_type := IntegerTypeData.from_json(
dpcode, current_definition.values
)
):
continue
return integer_type

return None


@dataclass
class IntegerTypeData:
"""Integer Type Data."""
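
Both variants of find_dpcode in this diff implement the same precedence: status_range is consulted before function unless prefer_function is set, in which case the order flips. A tiny self-contained illustration of that ordered lookup, with plain dicts standing in for the device's function and status_range mappings (toy values, not real Tuya data):

# Toy sketch of the prefer_function lookup order; not Home Assistant code.
def ordered_lookup(code, function, status_range, prefer_function=False):
    lookup_tuple = (
        (function, status_range) if prefer_function else (status_range, function)
    )
    for mapping in lookup_tuple:
        # Return the first definition found in the preferred order.
        if (value := mapping.get(code)) is not None:
            return value
    return None

status_range = {"mode": "status definition"}
function = {"mode": "function definition"}
assert ordered_lookup("mode", function, status_range) == "status definition"
assert ordered_lookup("mode", function, status_range, prefer_function=True) == (
    "function definition"
)
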
@@ -26,7 +26,7 @@ from .const import (
DPType,
)
from .entity import TuyaEntity
from .models import IntegerTypeData, find_dpcode
from .models import IntegerTypeData
from .util import ActionDPCodeNotFoundError

NUMBERS: dict[DeviceCategory, tuple[NumberEntityDescription, ...]] = {
@@ -486,8 +486,8 @@ class TuyaNumberEntity(TuyaEntity, NumberEntity):
self.entity_description = description
self._attr_unique_id = f"{super().unique_id}{description.key}"

if int_type := find_dpcode(
self.device, description.key, dptype=DPType.INTEGER, prefer_function=True
if int_type := self.find_dpcode(
description.key, dptype=DPType.INTEGER, prefer_function=True
):
self._number = int_type
self._attr_native_max_value = self._number.max_scaled

@@ -13,7 +13,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import TuyaConfigEntry
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode, DPType
from .entity import TuyaEntity
from .models import find_dpcode

# All descriptions can be found here. Mostly the Enum data types in the
# default instructions set of each category end up being a select.
@@ -389,8 +388,8 @@ class TuyaSelectEntity(TuyaEntity, SelectEntity):
self._attr_unique_id = f"{super().unique_id}{description.key}"

self._attr_options: list[str] = []
if enum_type := find_dpcode(
self.device, description.key, dptype=DPType.ENUM, prefer_function=True
if enum_type := self.find_dpcode(
description.key, dptype=DPType.ENUM, prefer_function=True
):
self._attr_options = enum_type.range

@@ -41,13 +41,7 @@ from .const import (
DPType,
)
from .entity import TuyaEntity
from .models import (
ComplexValue,
ElectricityValue,
EnumTypeData,
IntegerTypeData,
find_dpcode,
)
from .models import ComplexValue, ElectricityValue, EnumTypeData, IntegerTypeData
from .util import get_dptype

_WIND_DIRECTIONS = {
@@ -1690,13 +1684,13 @@ class TuyaSensorEntity(TuyaEntity, SensorEntity):
f"{super().unique_id}{description.key}{description.subkey or ''}"
)

if int_type := find_dpcode(self.device, description.key, dptype=DPType.INTEGER):
if int_type := self.find_dpcode(description.key, dptype=DPType.INTEGER):
self._type_data = int_type
self._type = DPType.INTEGER
if description.native_unit_of_measurement is None:
self._attr_native_unit_of_measurement = int_type.unit
elif enum_type := find_dpcode(
self.device, description.key, dptype=DPType.ENUM, prefer_function=True
elif enum_type := self.find_dpcode(
description.key, dptype=DPType.ENUM, prefer_function=True
):
self._type_data = enum_type
self._type = DPType.ENUM

@@ -18,7 +18,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import TuyaConfigEntry
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode, DPType
from .entity import TuyaEntity
from .models import EnumTypeData, find_dpcode
from .models import EnumTypeData
from .util import get_dpcode

TUYA_MODE_RETURN_HOME = "chargego"
@@ -97,8 +97,8 @@ class TuyaVacuumEntity(TuyaEntity, StateVacuumEntity):
self._attr_supported_features |= VacuumEntityFeature.RETURN_HOME
self._return_home_use_switch_charge = True
elif (
enum_type := find_dpcode(
self.device, DPCode.MODE, dptype=DPType.ENUM, prefer_function=True
enum_type := self.find_dpcode(
DPCode.MODE, dptype=DPType.ENUM, prefer_function=True
)
) and TUYA_MODE_RETURN_HOME in enum_type.range:
self._attr_supported_features |= VacuumEntityFeature.RETURN_HOME
@@ -111,8 +111,8 @@ class TuyaVacuumEntity(TuyaEntity, StateVacuumEntity):
VacuumEntityFeature.STOP | VacuumEntityFeature.START
)

if enum_type := find_dpcode(
self.device, DPCode.SUCTION, dptype=DPType.ENUM, prefer_function=True
if enum_type := self.find_dpcode(
DPCode.SUCTION, dptype=DPType.ENUM, prefer_function=True
):
self._fan_speed = enum_type
self._attr_fan_speed_list = enum_type.range

@@ -21,7 +21,7 @@
"zha",
"universal_silabs_flasher"
],
"requirements": ["zha==0.0.78"],
"requirements": ["zha==0.0.77"],
"usb": [
{
"description": "*2652*",

@@ -3,9 +3,16 @@
from __future__ import annotations

from enum import StrEnum
from functools import partial
from typing import TYPE_CHECKING, Final

from .generated.entity_platforms import EntityPlatforms
from .helpers.deprecation import (
DeprecatedConstantEnum,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
from .util.event_type import EventType
from .util.hass_dict import HassKey
from .util.signal_type import SignalType
@@ -480,6 +487,13 @@ class UnitOfReactivePower(StrEnum):
KILO_VOLT_AMPERE_REACTIVE = "kvar"


_DEPRECATED_POWER_VOLT_AMPERE_REACTIVE: Final = DeprecatedConstantEnum(
UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
"2025.9",
)
"""Deprecated: please use UnitOfReactivePower.VOLT_AMPERE_REACTIVE."""


# Energy units
class UnitOfEnergy(StrEnum):
"""Energy units."""
@@ -671,6 +685,13 @@ class UnitOfArea(StrEnum):
HECTARES = "ha"


_DEPRECATED_AREA_SQUARE_METERS: Final = DeprecatedConstantEnum(
UnitOfArea.SQUARE_METERS,
"2025.12",
)
"""Deprecated: please use UnitOfArea.SQUARE_METERS"""


# Mass units
class UnitOfMass(StrEnum):
"""Mass units."""
@@ -993,3 +1014,10 @@ FORMAT_DATETIME: Final = f"{FORMAT_DATE} {FORMAT_TIME}"
# This is not a hard limit, but caches and other
# data structures will be pre-allocated to this size
MAX_EXPECTED_ENTITY_IDS: Final = 16384

# These can be removed if no deprecated constant are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
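
The const.py hunks above pair each re-added _DEPRECATED_* constant with the module-level __getattr__/__dir__ hooks at the bottom of the file. The general mechanism is PEP 562: looking up a name that no longer exists as a module attribute falls through to __getattr__, which can warn and return the replacement. A generic, self-contained sketch of that pattern (illustrative only, not Home Assistant's deprecation helpers):

# deprecation_shim.py - generic PEP 562 sketch, independent of Home Assistant.
import warnings

NEW_CONSTANT = "m²"
# Maps a removed public name to (replacement value, version it disappears in).
_DEPRECATED_OLD_CONSTANT = (NEW_CONSTANT, "2025.12")

def __getattr__(name: str):
    deprecated = globals().get(f"_DEPRECATED_{name}")
    if deprecated is None:
        raise AttributeError(f"module has no attribute {name!r}")
    value, remove_in = deprecated
    warnings.warn(
        f"{name} is deprecated and will be removed in {remove_in}",
        DeprecationWarning,
        stacklevel=2,
    )
    return value

# Importing OLD_CONSTANT from this module would emit a DeprecationWarning
# and yield NEW_CONSTANT's value.
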
@@ -645,12 +645,24 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]):
__progress_task: asyncio.Task[Any] | None = None
__no_progress_task_reported = False
deprecated_show_progress = False
_progress_step_data: ProgressStepData[_FlowResultT] = {
"tasks": {},
"abort_reason": "",
"abort_description_placeholders": MappingProxyType({}),
"next_step_result": None,
}
__progress_step_data: ProgressStepData[_FlowResultT] | None = None

@property
def _progress_step_data(self) -> ProgressStepData[_FlowResultT]:
"""Return progress step data.

A property is used instead of a simple attribute as derived classes
do not call super().__init__.
The property makes sure that the dict is initialized if needed.
"""
if not self.__progress_step_data:
self.__progress_step_data = {
"tasks": {},
"abort_reason": "",
"abort_description_placeholders": MappingProxyType({}),
"next_step_result": None,
}
return self.__progress_step_data

@property
def source(self) -> str | None:
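
For context on the hunk above: a dict literal assigned at class level is created once and shared by every handler instance, and because derived flows do not call super().__init__ it cannot simply be moved into an initializer; the property instead creates the per-instance dict lazily on first access. A stripped-down sketch of the difference, with illustrative class names rather than the real ones:

# Sketch only: why a shared class-level dict is replaced by a lazy property.
class SharedState:
    data: dict = {"tasks": {}}  # one dict object shared by all instances

class LazyState:
    _data: dict | None = None

    @property
    def data(self) -> dict:
        # Created on first access, so even subclasses that never call
        # __init__ get their own dict instead of mutating a class-wide one.
        if self._data is None:
            self._data = {"tasks": {}}
        return self._data

a, b = SharedState(), SharedState()
a.data["tasks"]["step"] = object()
assert "step" in b.data["tasks"]  # state leaks across instances

c, d = LazyState(), LazyState()
c.data["tasks"]["step"] = object()
assert "step" not in d.data["tasks"]  # each instance is isolated
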
@@ -777,9 +789,10 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]):
self, user_input: dict[str, Any] | None = None
) -> _FlowResultT:
"""Abort the flow."""
progress_step_data = self._progress_step_data
return self.async_abort(
reason=self._progress_step_data["abort_reason"],
description_placeholders=self._progress_step_data[
reason=progress_step_data["abort_reason"],
description_placeholders=progress_step_data[
"abort_description_placeholders"
],
)
@@ -795,14 +808,15 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]):
without using async_show_progress_done.
If no next step is set, abort the flow.
"""
if self._progress_step_data["next_step_result"] is None:
progress_step_data = self._progress_step_data
if (next_step_result := progress_step_data["next_step_result"]) is None:
return self.async_abort(
reason=self._progress_step_data["abort_reason"],
description_placeholders=self._progress_step_data[
reason=progress_step_data["abort_reason"],
description_placeholders=progress_step_data[
"abort_description_placeholders"
],
)
return self._progress_step_data["next_step_result"]
return next_step_result

@callback
def async_external_step(
@@ -1021,9 +1035,9 @@ def progress_step[
self: FlowHandler[Any, ResultT], *args: P.args, **kwargs: P.kwargs
) -> ResultT:
step_id = func.__name__.replace("async_step_", "")

progress_step_data = self._progress_step_data
# Check if we have a progress task running
progress_task = self._progress_step_data["tasks"].get(step_id)
progress_task = progress_step_data["tasks"].get(step_id)

if progress_task is None:
# First call - create and start the progress task
@@ -1031,30 +1045,30 @@ def progress_step[
func(self, *args, **kwargs), # type: ignore[arg-type]
f"Progress step {step_id}",
)
self._progress_step_data["tasks"][step_id] = progress_task
progress_step_data["tasks"][step_id] = progress_task

if not progress_task.done():
# Handle description placeholders
placeholders = None
if description_placeholders is not None:
if callable(description_placeholders):
placeholders = description_placeholders(self)
else:
placeholders = description_placeholders
if not progress_task.done():
# Handle description placeholders
placeholders = None
if description_placeholders is not None:
if callable(description_placeholders):
placeholders = description_placeholders(self)
else:
placeholders = description_placeholders

return self.async_show_progress(
step_id=step_id,
progress_action=step_id,
progress_task=progress_task,
description_placeholders=placeholders,
)
return self.async_show_progress(
step_id=step_id,
progress_action=step_id,
progress_task=progress_task,
description_placeholders=placeholders,
)

# Task is done or this is a subsequent call
try:
self._progress_step_data["next_step_result"] = await progress_task
progress_step_data["next_step_result"] = await progress_task
except AbortFlow as err:
self._progress_step_data["abort_reason"] = err.reason
self._progress_step_data["abort_description_placeholders"] = (
progress_step_data["abort_reason"] = err.reason
progress_step_data["abort_description_placeholders"] = (
err.description_placeholders or {}
)
return self.async_show_progress_done(
@@ -1062,7 +1076,7 @@ def progress_step[
)
finally:
# Clean up task reference
self._progress_step_data["tasks"].pop(step_id, None)
progress_step_data["tasks"].pop(step_id, None)

return self.async_show_progress_done(
next_step_id="_progress_step_progress_done"

@@ -90,9 +90,7 @@ def run(script_args: list) -> int:
help="Exit non-zero if warnings are present",
)

# Parse all args including --config & --script. Do not use script_args.
# Example: python -m homeassistant --config "." --script check_config
args, unknown = parser.parse_known_args()
args, unknown = parser.parse_known_args(script_args)
if unknown:
print(color("red", "Unknown arguments:", ", ".join(unknown)))
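
The check_config hunk above switches between argparse reading the process-wide sys.argv and being handed the script's own argument list explicitly. A small standalone example of what parse_known_args does with an explicit list (toy parser, not the real check_config options):

# Toy example of argparse.parse_known_args with an explicit argv list.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--files", action="store_true")

script_args = ["--files", "--something-else"]
args, unknown = parser.parse_known_args(script_args)
assert args.files is True
assert unknown == ["--something-else"]

# Calling parser.parse_known_args() with no argument would parse sys.argv[1:]
# instead, which may still contain options meant for the wrapping CLI.
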
requirements_all.txt
@@ -190,7 +190,7 @@ aioairzone-cloud==0.7.2
aioairzone==1.0.2

# homeassistant.components.alexa_devices
aioamazondevices==8.0.1
aioamazondevices==6.5.6

# homeassistant.components.ambient_network
# homeassistant.components.ambient_station
@@ -3222,7 +3222,7 @@ zeroconf==0.148.0
zeversolar==0.3.2

# homeassistant.components.zha
zha==0.0.78
zha==0.0.77

# homeassistant.components.zhong_hong
zhong-hong-hvac==1.0.13

requirements_test_all.txt
@@ -178,7 +178,7 @@ aioairzone-cloud==0.7.2
aioairzone==1.0.2

# homeassistant.components.alexa_devices
aioamazondevices==8.0.1
aioamazondevices==6.5.6

# homeassistant.components.ambient_network
# homeassistant.components.ambient_station
@@ -2665,7 +2665,7 @@ zeroconf==0.148.0
zeversolar==0.3.2

# homeassistant.components.zha
zha==0.0.78
zha==0.0.77

# homeassistant.components.zwave_js
zwave-js-server-python==0.67.1

@@ -4,7 +4,7 @@ from collections.abc import Generator
from copy import deepcopy
from unittest.mock import AsyncMock, patch

from aioamazondevices.const.devices import DEVICE_TYPE_TO_MODEL
from aioamazondevices.const import DEVICE_TYPE_TO_MODEL
import pytest

from homeassistant.components.alexa_devices.const import (

@@ -2,12 +2,12 @@

from datetime import UTC, datetime

from aioamazondevices.const.schedules import (
from aioamazondevices.api import AmazonDevice, AmazonDeviceSensor, AmazonSchedule
from aioamazondevices.const import (
NOTIFICATION_ALARM,
NOTIFICATION_REMINDER,
NOTIFICATION_TIMER,
)
from aioamazondevices.structures import AmazonDevice, AmazonDeviceSensor, AmazonSchedule

TEST_CODE = "023123"
TEST_PASSWORD = "fake_password"

@@ -14,11 +14,11 @@
'online': True,
'sensors': dict({
'dnd': dict({
'__type': "<class 'aioamazondevices.structures.AmazonDeviceSensor'>",
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='dnd', value=False, error=False, error_type=None, error_msg=None, scale=None)",
}),
'temperature': dict({
'__type': "<class 'aioamazondevices.structures.AmazonDeviceSensor'>",
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='temperature', value='22.5', error=False, error_type=None, error_msg=None, scale='CELSIUS')",
}),
}),
@@ -44,11 +44,11 @@
'online': True,
'sensors': dict({
'dnd': dict({
'__type': "<class 'aioamazondevices.structures.AmazonDeviceSensor'>",
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='dnd', value=False, error=False, error_type=None, error_msg=None, scale=None)",
}),
'temperature': dict({
'__type': "<class 'aioamazondevices.structures.AmazonDeviceSensor'>",
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='temperature', value='22.5', error=False, error_type=None, error_msg=None, scale='CELSIUS')",
}),
}),

@@ -2,7 +2,7 @@

from unittest.mock import AsyncMock

from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY, SPEAKER_GROUP_MODEL
from aioamazondevices.const import SPEAKER_GROUP_FAMILY, SPEAKER_GROUP_MODEL
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
import pytest


@@ -218,7 +218,6 @@ async def test_async_step_integration_discovery(
assert result["result"].unique_id == "C666666"


@pytest.mark.usefixtures("mock_ezviz_client")
async def test_options_flow(
hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> None:

@@ -122,7 +122,7 @@
'validDPTs': list([
dict({
'main': 9,
'sub': 7,
'sub': 2,
}),
]),
'write': False,

@@ -1,6 +1,6 @@
"""Tests for the LibreHardwareMonitor init."""

import pytest
import logging

from homeassistant.components.libre_hardware_monitor.const import DOMAIN
from homeassistant.core import HomeAssistant
@@ -11,8 +11,9 @@ from .conftest import VALID_CONFIG

from tests.common import MockConfigEntry

_LOGGER = logging.getLogger(__name__)


@pytest.mark.usefixtures("mock_lhm_client")
async def test_migration_to_unique_ids(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,

@@ -2873,55 +2873,6 @@
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states[platforms0][sensor.oven_finish-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.oven_finish',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Finish',
|
||||
'platform': 'miele',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'finish',
|
||||
'unique_id': 'DummyAppliance_12-state_finish_timestamp',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states[platforms0][sensor.oven_finish-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'timestamp',
|
||||
'friendly_name': 'Oven Finish',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.oven_finish',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states[platforms0][sensor.oven_program-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -3471,55 +3422,6 @@
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states[platforms0][sensor.oven_start-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.oven_start',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Start',
|
||||
'platform': 'miele',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'start',
|
||||
'unique_id': 'DummyAppliance_12-state_start_timestamp',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states[platforms0][sensor.oven_start-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'timestamp',
|
||||
'friendly_name': 'Oven Start',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.oven_start',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states[platforms0][sensor.oven_start_in-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -4084,55 +3986,6 @@
|
||||
'state': '10.0',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states[platforms0][sensor.washing_machine_finish-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.washing_machine_finish',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Finish',
|
||||
'platform': 'miele',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'finish',
|
||||
'unique_id': 'Dummy_Appliance_3-state_finish_timestamp',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states[platforms0][sensor.washing_machine_finish-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'timestamp',
|
||||
'friendly_name': 'Washing machine Finish',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.washing_machine_finish',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states[platforms0][sensor.washing_machine_program-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -4513,55 +4366,6 @@
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states[platforms0][sensor.washing_machine_start-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.washing_machine_start',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Start',
|
||||
'platform': 'miele',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'start',
|
||||
'unique_id': 'Dummy_Appliance_3-state_start_timestamp',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states[platforms0][sensor.washing_machine_start-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'timestamp',
|
||||
'friendly_name': 'Washing machine Start',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.washing_machine_start',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states[platforms0][sensor.washing_machine_start_in-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -5217,55 +5021,6 @@
|
||||
'state': '0',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states_api_push[platforms0][sensor.oven_finish-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.oven_finish',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Finish',
|
||||
'platform': 'miele',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'finish',
|
||||
'unique_id': 'DummyAppliance_12-state_finish_timestamp',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states_api_push[platforms0][sensor.oven_finish-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'timestamp',
|
||||
'friendly_name': 'Oven Finish',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.oven_finish',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '2025-05-31T12:35:00+00:00',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states_api_push[platforms0][sensor.oven_program-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -5815,55 +5570,6 @@
|
||||
'state': '5',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states_api_push[platforms0][sensor.oven_start-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.oven_start',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Start',
|
||||
'platform': 'miele',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'start',
|
||||
'unique_id': 'DummyAppliance_12-state_start_timestamp',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states_api_push[platforms0][sensor.oven_start-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'timestamp',
|
||||
'friendly_name': 'Oven Start',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.oven_start',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states_api_push[platforms0][sensor.oven_start_in-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -6428,55 +6134,6 @@
|
||||
'state': '10.0',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states_api_push[platforms0][sensor.washing_machine_finish-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.washing_machine_finish',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Finish',
|
||||
'platform': 'miele',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'finish',
|
||||
'unique_id': 'Dummy_Appliance_3-state_finish_timestamp',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states_api_push[platforms0][sensor.washing_machine_finish-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'timestamp',
|
||||
'friendly_name': 'Washing machine Finish',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.washing_machine_finish',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states_api_push[platforms0][sensor.washing_machine_program-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -6857,55 +6514,6 @@
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states_api_push[platforms0][sensor.washing_machine_start-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.washing_machine_start',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Start',
|
||||
'platform': 'miele',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'start',
|
||||
'unique_id': 'Dummy_Appliance_3-state_start_timestamp',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states_api_push[platforms0][sensor.washing_machine_start-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'timestamp',
|
||||
'friendly_name': 'Washing machine Start',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.washing_machine_start',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor_states_api_push[platforms0][sensor.washing_machine_start_in-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -7317,55 +6925,6 @@
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_vacuum_sensor_states[platforms0-vacuum_device.json][sensor.robot_vacuum_cleaner_finish-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.robot_vacuum_cleaner_finish',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Finish',
|
||||
'platform': 'miele',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'finish',
|
||||
'unique_id': 'Dummy_Vacuum_1-state_finish_timestamp',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_vacuum_sensor_states[platforms0-vacuum_device.json][sensor.robot_vacuum_cleaner_finish-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'timestamp',
|
||||
'friendly_name': 'Robot vacuum cleaner Finish',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.robot_vacuum_cleaner_finish',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_vacuum_sensor_states[platforms0-vacuum_device.json][sensor.robot_vacuum_cleaner_program-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -7547,52 +7106,3 @@
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_vacuum_sensor_states[platforms0-vacuum_device.json][sensor.robot_vacuum_cleaner_start-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.robot_vacuum_cleaner_start',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Start',
|
||||
'platform': 'miele',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'start',
|
||||
'unique_id': 'Dummy_Vacuum_1-state_start_timestamp',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_vacuum_sensor_states[platforms0-vacuum_device.json][sensor.robot_vacuum_cleaner_start-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'timestamp',
|
||||
'friendly_name': 'Robot vacuum cleaner Start',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.robot_vacuum_cleaner_start',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Tests for miele sensor module."""
|
||||
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from datetime import timedelta
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from freezegun.api import FrozenDateTimeFactory
|
||||
@@ -23,7 +23,6 @@ from tests.common import (
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.freeze_time("2025-05-31 12:30:00+00:00")
|
||||
@pytest.mark.parametrize("platforms", [(SENSOR_DOMAIN,)])
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_sensor_states(
|
||||
@@ -38,7 +37,6 @@ async def test_sensor_states(
|
||||
await snapshot_platform(hass, entity_registry, snapshot, setup_platform.entry_id)
|
||||
|
||||
|
||||
@pytest.mark.freeze_time("2025-05-31 12:30:00+00:00")
|
||||
@pytest.mark.parametrize("platforms", [(SENSOR_DOMAIN,)])
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_sensor_states_api_push(
|
||||
@@ -304,7 +302,6 @@ async def test_laundry_wash_scenario(
|
||||
"""Parametrized test for verifying time sensors for wahsing machine devices when API glitches at program end."""
|
||||
|
||||
step = 0
|
||||
freezer.move_to("2025-05-31T12:00:00+00:00")
|
||||
|
||||
# Initial state when the washing machine is off
|
||||
check_sensor_state(hass, "sensor.washing_machine", "off", step)
|
||||
@@ -320,8 +317,6 @@ async def test_laundry_wash_scenario(
|
||||
check_sensor_state(hass, "sensor.washing_machine_remaining_time", "unknown", step)
|
||||
# OFF -> elapsed forced to unknown (some devices continue reporting last value of last cycle)
|
||||
check_sensor_state(hass, "sensor.washing_machine_elapsed_time", "unknown", step)
|
||||
check_sensor_state(hass, "sensor.washing_machine_start", "unknown", step)
|
||||
check_sensor_state(hass, "sensor.washing_machine_finish", "unknown", step)
|
||||
# consumption sensors have to report "unknown" when the device is not working
|
||||
check_sensor_state(
|
||||
hass, "sensor.washing_machine_energy_consumption", "unknown", step
|
||||
@@ -362,7 +357,7 @@ async def test_laundry_wash_scenario(
|
||||
},
|
||||
}
|
||||
|
||||
freezer.move_to("2025-05-31T12:30:00+00:00")
|
||||
freezer.tick(timedelta(seconds=130))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -381,12 +376,8 @@ async def test_laundry_wash_scenario(
|
||||
"unit": "l",
|
||||
},
|
||||
}
|
||||
device_fixture["DummyWasher"]["state"]["elapsedTime"][0] = 0
|
||||
device_fixture["DummyWasher"]["state"]["elapsedTime"][1] = 14
|
||||
device_fixture["DummyWasher"]["state"]["remainingTime"][0] = 1
|
||||
device_fixture["DummyWasher"]["state"]["remainingTime"][1] = 43
|
||||
|
||||
freezer.move_to("2025-05-31T12:32:00+00:00")
|
||||
freezer.tick(timedelta(seconds=130))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -398,14 +389,8 @@ async def test_laundry_wash_scenario(
|
||||
check_sensor_state(hass, "sensor.washing_machine_target_temperature", "30.0", step)
|
||||
check_sensor_state(hass, "sensor.washing_machine_spin_speed", "1200", step)
|
||||
# IN_USE -> elapsed, remaining time from API (normal case)
|
||||
check_sensor_state(hass, "sensor.washing_machine_remaining_time", "103", step)
|
||||
check_sensor_state(hass, "sensor.washing_machine_elapsed_time", "14", step)
|
||||
check_sensor_state(
|
||||
hass, "sensor.washing_machine_start", "2025-05-31T12:18:00+00:00", step
|
||||
)
|
||||
check_sensor_state(
|
||||
hass, "sensor.washing_machine_finish", "2025-05-31T14:15:00+00:00", step
|
||||
)
|
||||
check_sensor_state(hass, "sensor.washing_machine_remaining_time", "105", step)
|
||||
check_sensor_state(hass, "sensor.washing_machine_elapsed_time", "12", step)
|
||||
check_sensor_state(hass, "sensor.washing_machine_energy_consumption", "0.0", step)
|
||||
check_sensor_state(hass, "sensor.washing_machine_water_consumption", "0", step)
|
||||
|
||||
@@ -421,7 +406,7 @@ async def test_laundry_wash_scenario(
|
||||
},
|
||||
}
|
||||
|
||||
freezer.move_to("2025-05-31T12:34:00+00:00")
|
||||
freezer.tick(timedelta(seconds=130))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -441,7 +426,7 @@ async def test_laundry_wash_scenario(
|
||||
device_fixture["DummyWasher"]["state"]["elapsedTime"][0] = 1
|
||||
device_fixture["DummyWasher"]["state"]["elapsedTime"][1] = 49
|
||||
|
||||
freezer.move_to("2025-05-31T14:07:00+00:00")
|
||||
freezer.tick(timedelta(seconds=130))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
step += 1
|
||||
@@ -454,12 +439,6 @@ async def test_laundry_wash_scenario(
|
||||
# RINSE HOLD -> elapsed, remaining time from API (normal case)
|
||||
check_sensor_state(hass, "sensor.washing_machine_remaining_time", "8", step)
|
||||
check_sensor_state(hass, "sensor.washing_machine_elapsed_time", "109", step)
|
||||
check_sensor_state(
|
||||
hass, "sensor.washing_machine_start", "2025-05-31T12:18:00+00:00", step
|
||||
)
|
||||
check_sensor_state(
|
||||
hass, "sensor.washing_machine_finish", "2025-05-31T14:15:00+00:00", step
|
||||
)
|
||||
|
||||
# Simulate program ended
|
||||
device_fixture["DummyWasher"]["state"]["status"]["value_raw"] = 7
|
||||
@@ -474,7 +453,7 @@ async def test_laundry_wash_scenario(
|
||||
device_fixture["DummyWasher"]["state"]["elapsedTime"][1] = 0
|
||||
device_fixture["DummyWasher"]["state"]["ecoFeedback"] = None
|
||||
|
||||
freezer.move_to("2025-05-31T14:30:00+00:00")
|
||||
freezer.tick(timedelta(seconds=130))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
step += 1
|
||||
@@ -490,12 +469,6 @@ async def test_laundry_wash_scenario(
|
||||
check_sensor_state(hass, "sensor.washing_machine_remaining_time", "0", step)
|
||||
# PROGRAM_ENDED -> elapsed time kept from last program (some devices immediately go to 0)
|
||||
check_sensor_state(hass, "sensor.washing_machine_elapsed_time", "109", step)
|
||||
check_sensor_state(
|
||||
hass, "sensor.washing_machine_start", "2025-05-31T12:18:00+00:00", step
|
||||
)
|
||||
check_sensor_state(
|
||||
hass, "sensor.washing_machine_finish", "2025-05-31T14:15:00+00:00", step
|
||||
)
|
||||
# consumption values now are reporting last known value, API might start reporting null object
|
||||
check_sensor_state(hass, "sensor.washing_machine_energy_consumption", "0.1", step)
|
||||
check_sensor_state(hass, "sensor.washing_machine_water_consumption", "7", step)
|
||||
@@ -516,7 +489,7 @@ async def test_laundry_wash_scenario(
|
||||
device_fixture["DummyWasher"]["state"]["elapsedTime"][0] = 0
|
||||
device_fixture["DummyWasher"]["state"]["elapsedTime"][1] = 0
|
||||
|
||||
freezer.move_to("2025-05-31T14:32:00+00:00")
|
||||
freezer.tick(timedelta(seconds=130))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
step += 1
|
||||
@@ -531,10 +504,6 @@ async def test_laundry_wash_scenario(
|
||||
# PROGRAMMED -> elapsed, remaining time from API (normal case)
|
||||
check_sensor_state(hass, "sensor.washing_machine_remaining_time", "119", step)
|
||||
check_sensor_state(hass, "sensor.washing_machine_elapsed_time", "0", step)
|
||||
check_sensor_state(hass, "sensor.washing_machine_start", "unknown", step)
|
||||
check_sensor_state(
|
||||
hass, "sensor.washing_machine_finish", "2025-05-31T16:31:00+00:00", step
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("load_device_file", ["laundry.json"])
|
||||
@@ -550,7 +519,6 @@ async def test_laundry_dry_scenario(
|
||||
"""Parametrized test for verifying time sensors for tumble dryer devices when API reports time value from last cycle, when device is off."""
|
||||
|
||||
step = 0
|
||||
freezer.move_to("2025-05-31T12:00:00+00:00")
|
||||
|
||||
# Initial state when the washing machine is off
|
||||
check_sensor_state(hass, "sensor.tumble_dryer", "off", step)
|
||||
@@ -560,8 +528,6 @@ async def test_laundry_dry_scenario(
|
||||
# OFF -> elapsed, remaining forced to unknown (some devices continue reporting last value of last cycle)
|
||||
check_sensor_state(hass, "sensor.tumble_dryer_remaining_time", "unknown", step)
|
||||
check_sensor_state(hass, "sensor.tumble_dryer_elapsed_time", "unknown", step)
|
||||
check_sensor_state(hass, "sensor.tumble_dryer_start", "unknown", step)
|
||||
check_sensor_state(hass, "sensor.tumble_dryer_finish", "unknown", step)
|
||||
|
||||
# Simulate program started
|
||||
device_fixture["DummyDryer"]["state"]["status"]["value_raw"] = 5
|
||||
@@ -579,7 +545,7 @@ async def test_laundry_dry_scenario(
|
||||
device_fixture["DummyDryer"]["state"]["dryingStep"]["value_raw"] = 2
|
||||
device_fixture["DummyDryer"]["state"]["dryingStep"]["value_localized"] = "Normal"
|
||||
|
||||
freezer.move_to("2025-05-31T12:30:00+00:00")
|
||||
freezer.tick(timedelta(seconds=130))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
step += 1
|
||||
@@ -591,12 +557,6 @@ async def test_laundry_dry_scenario(
|
||||
# IN_USE -> elapsed, remaining time from API (normal case)
|
||||
check_sensor_state(hass, "sensor.tumble_dryer_remaining_time", "49", step)
|
||||
check_sensor_state(hass, "sensor.tumble_dryer_elapsed_time", "20", step)
|
||||
check_sensor_state(
|
||||
hass, "sensor.tumble_dryer_start", "2025-05-31T12:10:00+00:00", step
|
||||
)
|
||||
check_sensor_state(
|
||||
hass, "sensor.tumble_dryer_finish", "2025-05-31T13:19:00+00:00", step
|
||||
)
|
||||
|
||||
# Simulate program end
|
||||
device_fixture["DummyDryer"]["state"]["status"]["value_raw"] = 7
|
||||
@@ -610,7 +570,7 @@ async def test_laundry_dry_scenario(
|
||||
device_fixture["DummyDryer"]["state"]["elapsedTime"][0] = 1
|
||||
device_fixture["DummyDryer"]["state"]["elapsedTime"][1] = 18
|
||||
|
||||
freezer.move_to("2025-05-31T14:30:00+00:00")
|
||||
freezer.tick(timedelta(seconds=130))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
step += 1
|
||||
@@ -623,18 +583,9 @@ async def test_laundry_dry_scenario(
|
||||
check_sensor_state(hass, "sensor.tumble_dryer_remaining_time", "0", step)
|
||||
# PROGRAM_ENDED -> elapsed time kept from last program (some devices immediately go to 0)
|
||||
check_sensor_state(hass, "sensor.tumble_dryer_elapsed_time", "20", step)
|
||||
check_sensor_state(
|
||||
hass, "sensor.tumble_dryer_start", "2025-05-31T12:10:00+00:00", step
|
||||
)
|
||||
check_sensor_state(
|
||||
hass, "sensor.tumble_dryer_finish", "2025-05-31T13:19:00+00:00", step
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("restore_state", ["45", STATE_UNKNOWN, STATE_UNAVAILABLE])
|
||||
@pytest.mark.parametrize(
|
||||
"restore_state_abs", ["2025-05-31T13:19:00+00:00", STATE_UNKNOWN, STATE_UNAVAILABLE]
|
||||
)
|
||||
@pytest.mark.parametrize("load_device_file", ["laundry.json"])
|
||||
@pytest.mark.parametrize("platforms", [(SENSOR_DOMAIN,)])
|
||||
async def test_elapsed_time_sensor_restored(
|
||||
@@ -645,12 +596,10 @@ async def test_elapsed_time_sensor_restored(
|
||||
device_fixture: MieleDevices,
|
||||
freezer: FrozenDateTimeFactory,
|
||||
restore_state,
|
||||
restore_state_abs,
|
||||
) -> None:
|
||||
"""Test that elapsed time returns the restored value when program ended."""
|
||||
|
||||
entity_id = "sensor.washing_machine_elapsed_time"
|
||||
entity_id_abs = "sensor.washing_machine_finish"
|
||||
|
||||
# Simulate program started
|
||||
device_fixture["DummyWasher"]["state"]["status"]["value_raw"] = 5
|
||||
@@ -674,12 +623,11 @@ async def test_elapsed_time_sensor_restored(
|
||||
device_fixture["DummyWasher"]["state"]["spinningSpeed"]["value_raw"] = 1200
|
||||
device_fixture["DummyWasher"]["state"]["spinningSpeed"]["value_localized"] = "1200"
|
||||
|
||||
freezer.move_to(datetime(2025, 5, 31, 12, 30, tzinfo=UTC))
|
||||
freezer.tick(timedelta(seconds=130))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get(entity_id).state == "12"
|
||||
assert hass.states.get(entity_id_abs).state == "2025-05-31T14:15:00+00:00"
|
||||
|
||||
# Simulate program ended
|
||||
device_fixture["DummyWasher"]["state"]["status"]["value_raw"] = 7
|
||||
@@ -693,7 +641,7 @@ async def test_elapsed_time_sensor_restored(
|
||||
device_fixture["DummyWasher"]["state"]["elapsedTime"][0] = 0
|
||||
device_fixture["DummyWasher"]["state"]["elapsedTime"][1] = 0
|
||||
|
||||
freezer.move_to(datetime(2025, 5, 31, 14, 20, tzinfo=UTC))
|
||||
freezer.tick(timedelta(seconds=130))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -703,7 +651,6 @@ async def test_elapsed_time_sensor_restored(
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get(entity_id).state == "unavailable"
|
||||
assert hass.states.get(entity_id_abs).state == "unavailable"
|
||||
|
||||
# simulate restore with state different from native value
|
||||
mock_restore_cache_with_extra_data(
|
||||
@@ -722,19 +669,9 @@ async def test_elapsed_time_sensor_restored(
|
||||
"native_unit_of_measurement": "min",
|
||||
},
|
||||
),
|
||||
(
|
||||
State(
|
||||
entity_id_abs,
|
||||
restore_state_abs,
|
||||
{"device_class": "timestamp"},
|
||||
),
|
||||
{
|
||||
"native_value": datetime(2025, 5, 31, 14, 15, tzinfo=UTC),
|
||||
"native_unit_of_measurement": None,
|
||||
},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
await hass.config_entries.async_reload(mock_config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -742,8 +679,3 @@ async def test_elapsed_time_sensor_restored(
|
||||
state = hass.states.get(entity_id)
|
||||
assert state is not None
|
||||
assert state.state == "12"
|
||||
|
||||
# check that absolute time is the one restored and not the value reported by API
|
||||
state = hass.states.get(entity_id_abs)
|
||||
assert state is not None
|
||||
assert state.state == "2025-05-31T14:15:00+00:00"
|
||||
|
||||
@@ -103,7 +103,6 @@ async def test_form_create_entry_with_auth(
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_setup_entry")
|
||||
async def test_reauth_successful(hass: HomeAssistant) -> None:
|
||||
"""Test starting a reauthentication flow."""
|
||||
entry = MockConfigEntry(
|
||||
@@ -376,7 +375,6 @@ async def test_zeroconf_errors(hass: HomeAssistant, error) -> None:
|
||||
assert result["reason"] == reason
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_setup_entry")
|
||||
async def test_reconfigure_successful(hass: HomeAssistant) -> None:
|
||||
"""Test starting a reconfigure flow."""
|
||||
entry = MockConfigEntry(
|
||||
@@ -414,7 +412,6 @@ async def test_reconfigure_successful(hass: HomeAssistant) -> None:
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_setup_entry")
|
||||
async def test_reconfigure_not_successful(hass: HomeAssistant) -> None:
|
||||
"""Test starting a reconfigure flow but no connection found."""
|
||||
entry = MockConfigEntry(
|
||||
|
||||
@@ -54,10 +54,6 @@ async def test_user_api_key(
|
||||
CONF_NAME: NAME,
|
||||
CONF_SITE_ID: SITE_ID,
|
||||
CONF_SECTION_API_AUTH: {CONF_API_KEY: API_KEY},
|
||||
CONF_SECTION_WEB_AUTH: {
|
||||
CONF_USERNAME: "",
|
||||
CONF_PASSWORD: "",
|
||||
},
|
||||
},
|
||||
)
|
||||
assert result.get("type") is FlowResultType.CREATE_ENTRY
|
||||
@@ -89,7 +85,6 @@ async def test_user_web_login(
|
||||
{
|
||||
CONF_NAME: NAME,
|
||||
CONF_SITE_ID: SITE_ID,
|
||||
CONF_SECTION_API_AUTH: {CONF_API_KEY: ""},
|
||||
CONF_SECTION_WEB_AUTH: {
|
||||
CONF_USERNAME: USERNAME,
|
||||
CONF_PASSWORD: PASSWORD,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Tests for the SolarEdge integration."""
|
||||
|
||||
from unittest.mock import AsyncMock, Mock, patch
|
||||
from unittest.mock import AsyncMock, Mock
|
||||
|
||||
from aiohttp import ClientError
|
||||
|
||||
@@ -15,15 +15,8 @@ from .conftest import API_KEY, PASSWORD, SITE_ID, USERNAME
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
@patch(
|
||||
"homeassistant.config_entries.ConfigEntries.async_unload_platforms",
|
||||
return_value=True,
|
||||
)
|
||||
async def test_setup_unload_api_key(
|
||||
mock_unload_platforms: AsyncMock,
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
solaredge_api: Mock,
|
||||
recorder_mock: Recorder, hass: HomeAssistant, solaredge_api: Mock
|
||||
) -> None:
|
||||
"""Test successful setup and unload of a config entry with API key."""
|
||||
entry = MockConfigEntry(
|
||||
@@ -40,21 +33,11 @@ async def test_setup_unload_api_key(
|
||||
|
||||
assert await hass.config_entries.async_unload(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Unloading should be attempted because sensors were set up.
|
||||
mock_unload_platforms.assert_awaited_once()
|
||||
assert entry.state is ConfigEntryState.NOT_LOADED
|
||||
|
||||
|
||||
@patch(
|
||||
"homeassistant.config_entries.ConfigEntries.async_unload_platforms",
|
||||
return_value=True,
|
||||
)
|
||||
async def test_setup_unload_web_login(
|
||||
mock_unload_platforms: AsyncMock,
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
solaredge_web_api: AsyncMock,
|
||||
recorder_mock: Recorder, hass: HomeAssistant, solaredge_web_api: AsyncMock
|
||||
) -> None:
|
||||
"""Test successful setup and unload of a config entry with web login."""
|
||||
entry = MockConfigEntry(
|
||||
@@ -76,18 +59,10 @@ async def test_setup_unload_web_login(
|
||||
|
||||
assert await hass.config_entries.async_unload(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Unloading should NOT be attempted because sensors were not set up.
|
||||
mock_unload_platforms.assert_not_called()
|
||||
assert entry.state is ConfigEntryState.NOT_LOADED
|
||||
|
||||
|
||||
@patch(
|
||||
"homeassistant.config_entries.ConfigEntries.async_unload_platforms",
|
||||
return_value=True,
|
||||
)
|
||||
async def test_setup_unload_both(
|
||||
mock_unload_platforms: AsyncMock,
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
solaredge_api: Mock,
|
||||
@@ -115,8 +90,6 @@ async def test_setup_unload_both(
|
||||
|
||||
assert await hass.config_entries.async_unload(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
mock_unload_platforms.assert_awaited_once()
|
||||
assert entry.state is ConfigEntryState.NOT_LOADED
|
||||
|
||||
|
||||
|
||||
@@ -44,17 +44,6 @@ ENTRY_CONFIG = {
|
||||
},
|
||||
}
|
||||
|
||||
ENTRY_CONFIG_BLANK_QUERY = {
|
||||
CONF_NAME: "Get Value",
|
||||
CONF_QUERY: " ",
|
||||
CONF_COLUMN_NAME: "value",
|
||||
CONF_ADVANCED_OPTIONS: {
|
||||
CONF_UNIT_OF_MEASUREMENT: "MiB",
|
||||
CONF_DEVICE_CLASS: SensorDeviceClass.DATA_SIZE,
|
||||
CONF_STATE_CLASS: SensorStateClass.TOTAL,
|
||||
},
|
||||
}
|
||||
|
||||
ENTRY_CONFIG_WITH_VALUE_TEMPLATE = {
|
||||
CONF_QUERY: "SELECT 5 as value",
|
||||
CONF_COLUMN_NAME: "value",
|
||||
@@ -64,33 +53,6 @@ ENTRY_CONFIG_WITH_VALUE_TEMPLATE = {
|
||||
},
|
||||
}
|
||||
|
||||
ENTRY_CONFIG_WITH_QUERY_TEMPLATE = {
|
||||
CONF_QUERY: "SELECT {% if states('sensor.input1')=='on' %} 5 {% else %} 6 {% endif %} as value",
|
||||
CONF_COLUMN_NAME: "value",
|
||||
CONF_ADVANCED_OPTIONS: {
|
||||
CONF_UNIT_OF_MEASUREMENT: "MiB",
|
||||
CONF_VALUE_TEMPLATE: "{{ value }}",
|
||||
},
|
||||
}
|
||||
|
||||
ENTRY_CONFIG_WITH_BROKEN_QUERY_TEMPLATE = {
|
||||
CONF_QUERY: "SELECT {{ 5 as value",
|
||||
CONF_COLUMN_NAME: "value",
|
||||
CONF_ADVANCED_OPTIONS: {
|
||||
CONF_UNIT_OF_MEASUREMENT: "MiB",
|
||||
CONF_VALUE_TEMPLATE: "{{ value }}",
|
||||
},
|
||||
}
|
||||
|
||||
ENTRY_CONFIG_WITH_BROKEN_QUERY_TEMPLATE_OPT = {
|
||||
CONF_QUERY: "SELECT {{ 5 as value",
|
||||
CONF_COLUMN_NAME: "value",
|
||||
CONF_ADVANCED_OPTIONS: {
|
||||
CONF_UNIT_OF_MEASUREMENT: "MiB",
|
||||
CONF_VALUE_TEMPLATE: "{{ value }}",
|
||||
},
|
||||
}
|
||||
|
||||
ENTRY_CONFIG_INVALID_QUERY = {
|
||||
CONF_QUERY: "SELECT 5 FROM as value",
|
||||
CONF_COLUMN_NAME: "size",
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
import re
|
||||
from typing import Any
|
||||
from unittest.mock import patch
|
||||
|
||||
@@ -11,7 +10,7 @@ import pytest
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components.recorder import CONF_DB_URL, Recorder
|
||||
from homeassistant.components.recorder import CONF_DB_URL
|
||||
from homeassistant.components.sensor import (
|
||||
CONF_STATE_CLASS,
|
||||
SensorDeviceClass,
|
||||
@@ -30,7 +29,7 @@ from homeassistant.const import (
|
||||
CONF_VALUE_TEMPLATE,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType, InvalidData
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
|
||||
from . import (
|
||||
ENTRY_CONFIG,
|
||||
@@ -49,9 +48,6 @@ from . import (
|
||||
ENTRY_CONFIG_QUERY_NO_READ_ONLY_CTE,
|
||||
ENTRY_CONFIG_QUERY_NO_READ_ONLY_CTE_OPT,
|
||||
ENTRY_CONFIG_QUERY_NO_READ_ONLY_OPT,
|
||||
ENTRY_CONFIG_WITH_BROKEN_QUERY_TEMPLATE,
|
||||
ENTRY_CONFIG_WITH_BROKEN_QUERY_TEMPLATE_OPT,
|
||||
ENTRY_CONFIG_WITH_QUERY_TEMPLATE,
|
||||
ENTRY_CONFIG_WITH_VALUE_TEMPLATE,
|
||||
)
|
||||
|
||||
@@ -110,91 +106,7 @@ async def test_form_simple(
|
||||
}
|
||||
|
||||
|
||||
async def test_form_with_query_template(
|
||||
recorder_mock: Recorder, hass: HomeAssistant
|
||||
) -> None:
|
||||
"""Test for with query template."""
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {}
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.sql.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
DATA_CONFIG,
|
||||
)
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
ENTRY_CONFIG_WITH_QUERY_TEMPLATE,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == "Get Value"
|
||||
assert result["options"] == {
|
||||
CONF_QUERY: "SELECT {% if states('sensor.input1')=='on' %} 5 {% else %} 6 {% endif %} as value",
|
||||
CONF_COLUMN_NAME: "value",
|
||||
CONF_ADVANCED_OPTIONS: {
|
||||
CONF_UNIT_OF_MEASUREMENT: "MiB",
|
||||
CONF_VALUE_TEMPLATE: "{{ value }}",
|
||||
},
|
||||
}
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_form_with_broken_query_template(
|
||||
recorder_mock: Recorder, hass: HomeAssistant
|
||||
) -> None:
|
||||
"""Test form with broken query template."""
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {}
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
DATA_CONFIG,
|
||||
)
|
||||
message = re.escape("Schema validation failed @ data['query']")
|
||||
with pytest.raises(InvalidData, match=message):
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
ENTRY_CONFIG_WITH_BROKEN_QUERY_TEMPLATE,
|
||||
)
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.sql.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
ENTRY_CONFIG_WITH_QUERY_TEMPLATE,
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == "Get Value"
|
||||
assert result["options"] == {
|
||||
CONF_QUERY: "SELECT {% if states('sensor.input1')=='on' %} 5 {% else %} 6 {% endif %} as value",
|
||||
CONF_COLUMN_NAME: "value",
|
||||
CONF_ADVANCED_OPTIONS: {
|
||||
CONF_UNIT_OF_MEASUREMENT: "MiB",
|
||||
CONF_VALUE_TEMPLATE: "{{ value }}",
|
||||
},
|
||||
}
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_form_with_value_template(
|
||||
recorder_mock: Recorder, hass: HomeAssistant
|
||||
) -> None:
|
||||
async def test_form_with_value_template(hass: HomeAssistant) -> None:
|
||||
"""Test for with value template."""
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
@@ -280,7 +192,7 @@ async def test_flow_fails_invalid_query(hass: HomeAssistant) -> None:
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {
|
||||
CONF_QUERY: "query_no_read_only",
|
||||
CONF_QUERY: "query_invalid",
|
||||
}
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
@@ -290,7 +202,7 @@ async def test_flow_fails_invalid_query(hass: HomeAssistant) -> None:
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {
|
||||
CONF_QUERY: "query_no_read_only",
|
||||
CONF_QUERY: "query_invalid",
|
||||
}
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
@@ -572,7 +484,7 @@ async def test_options_flow_fails_invalid_query(hass: HomeAssistant) -> None:
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {
|
||||
CONF_QUERY: "query_no_read_only",
|
||||
CONF_QUERY: "query_invalid",
|
||||
}
|
||||
|
||||
result = await hass.config_entries.options.async_configure(
|
||||
@@ -582,8 +494,9 @@ async def test_options_flow_fails_invalid_query(hass: HomeAssistant) -> None:
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {
|
||||
CONF_QUERY: "query_no_read_only",
|
||||
CONF_QUERY: "query_invalid",
|
||||
}
|
||||
|
||||
result = await hass.config_entries.options.async_configure(
|
||||
result["flow_id"],
|
||||
user_input=ENTRY_CONFIG_QUERY_NO_READ_ONLY_OPT,
|
||||
@@ -614,13 +527,6 @@ async def test_options_flow_fails_invalid_query(hass: HomeAssistant) -> None:
|
||||
CONF_QUERY: "multiple_queries",
|
||||
}
|
||||
|
||||
message = re.escape("Schema validation failed @ data['query']")
|
||||
with pytest.raises(InvalidData, match=message):
|
||||
result = await hass.config_entries.options.async_configure(
|
||||
result["flow_id"],
|
||||
user_input=ENTRY_CONFIG_WITH_BROKEN_QUERY_TEMPLATE_OPT,
|
||||
)
|
||||
|
||||
result = await hass.config_entries.options.async_configure(
|
||||
result["flow_id"],
|
||||
user_input={
|
||||
|
||||
@@ -4,9 +4,6 @@ from __future__ import annotations
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.recorder import CONF_DB_URL, Recorder
|
||||
from homeassistant.components.sensor import (
|
||||
CONF_STATE_CLASS,
|
||||
@@ -19,7 +16,6 @@ from homeassistant.components.sql.const import (
|
||||
CONF_QUERY,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.components.sql.util import validate_sql_select
|
||||
from homeassistant.config_entries import SOURCE_USER, ConfigEntryState
|
||||
from homeassistant.const import (
|
||||
CONF_DEVICE_CLASS,
|
||||
@@ -28,7 +24,6 @@ from homeassistant.const import (
|
||||
CONF_VALUE_TEMPLATE,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.template import Template
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from . import YAML_CONFIG_INVALID, YAML_CONFIG_NO_DB, init_integration
|
||||
@@ -72,45 +67,6 @@ async def test_setup_invalid_config(
|
||||
await hass.async_block_till_done()
|
||||
|
||||
|
||||
async def test_invalid_query(hass: HomeAssistant) -> None:
|
||||
"""Test invalid query."""
|
||||
with pytest.raises(vol.Invalid, match="SQL query must be of type SELECT"):
|
||||
validate_sql_select(Template("DROP TABLE *", hass))
|
||||
|
||||
with pytest.raises(vol.Invalid, match="SQL query is empty or unknown type"):
|
||||
validate_sql_select(Template("SELECT5 as value", hass))
|
||||
|
||||
with pytest.raises(vol.Invalid, match="SQL query is empty or unknown type"):
|
||||
validate_sql_select(Template(";;", hass))
|
||||
|
||||
|
||||
async def test_query_no_read_only(hass: HomeAssistant) -> None:
|
||||
"""Test query no read only."""
|
||||
with pytest.raises(vol.Invalid, match="SQL query must be of type SELECT"):
|
||||
validate_sql_select(
|
||||
Template("UPDATE states SET state = 999999 WHERE state_id = 11125", hass)
|
||||
)
|
||||
|
||||
|
||||
async def test_query_no_read_only_cte(hass: HomeAssistant) -> None:
|
||||
"""Test query no read only CTE."""
|
||||
with pytest.raises(vol.Invalid, match="SQL query must be of type SELECT"):
|
||||
validate_sql_select(
|
||||
Template(
|
||||
"WITH test AS (SELECT state FROM states) UPDATE states SET states.state = test.state;",
|
||||
hass,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
async def test_multiple_queries(hass: HomeAssistant) -> None:
|
||||
"""Test multiple queries."""
|
||||
with pytest.raises(vol.Invalid, match="Multiple SQL statements are not allowed"):
|
||||
validate_sql_select(
|
||||
Template("SELECT 5 as value; UPDATE states SET state = 10;", hass)
|
||||
)
|
||||
|
||||
|
||||
async def test_migration_from_future(
|
||||
recorder_mock: Recorder, hass: HomeAssistant
|
||||
) -> None:
|
||||
|
||||
@@ -39,6 +39,7 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import issue_registry as ir
|
||||
from homeassistant.helpers.entity_platform import async_get_platforms
|
||||
from homeassistant.setup import async_setup_component
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
@@ -108,33 +109,6 @@ async def test_query_value_template(
|
||||
}
|
||||
|
||||
|
||||
async def test_template_query(
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
freezer: FrozenDateTimeFactory,
|
||||
) -> None:
|
||||
"""Test the SQL sensor with a query template."""
|
||||
options = {
|
||||
CONF_QUERY: "SELECT {% if states('sensor.input1')=='on' %} 5 {% else %} 6 {% endif %} as value",
|
||||
CONF_COLUMN_NAME: "value",
|
||||
CONF_ADVANCED_OPTIONS: {
|
||||
CONF_VALUE_TEMPLATE: "{{ value | int }}",
|
||||
},
|
||||
}
|
||||
await init_integration(hass, title="count_tables", options=options)
|
||||
|
||||
state = hass.states.get("sensor.count_tables")
|
||||
assert state.state == "6"
|
||||
|
||||
hass.states.async_set("sensor.input1", "on")
|
||||
freezer.tick(timedelta(minutes=1))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
state = hass.states.get("sensor.count_tables")
|
||||
assert state.state == "5"
|
||||
|
||||
|
||||
async def test_query_value_template_invalid(
|
||||
recorder_mock: Recorder, hass: HomeAssistant
|
||||
) -> None:
|
||||
@@ -150,59 +124,6 @@ async def test_query_value_template_invalid(
|
||||
assert state.state == "5.01"
|
||||
|
||||
|
||||
async def test_broken_template_query(
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
freezer: FrozenDateTimeFactory,
|
||||
) -> None:
|
||||
"""Test the SQL sensor with a query template which is broken."""
|
||||
options = {
|
||||
CONF_QUERY: "SELECT {{ 5 as value",
|
||||
CONF_COLUMN_NAME: "value",
|
||||
CONF_ADVANCED_OPTIONS: {
|
||||
CONF_VALUE_TEMPLATE: "{{ value | int }}",
|
||||
},
|
||||
}
|
||||
await init_integration(hass, title="count_tables", options=options)
|
||||
|
||||
state = hass.states.get("sensor.count_tables")
|
||||
assert not state
|
||||
|
||||
|
||||
async def test_broken_template_query_2(
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
freezer: FrozenDateTimeFactory,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test the SQL sensor with a query template."""
|
||||
hass.states.async_set("sensor.input1", "5")
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
options = {
|
||||
CONF_QUERY: "SELECT {{ states.sensor.input1.state | int / 1000}} as value",
|
||||
CONF_COLUMN_NAME: "value",
|
||||
}
|
||||
await init_integration(hass, title="count_tables", options=options)
|
||||
|
||||
state = hass.states.get("sensor.count_tables")
|
||||
assert state.state == "0.005"
|
||||
|
||||
hass.states.async_set("sensor.input1", "on")
|
||||
freezer.tick(timedelta(minutes=1))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
state = hass.states.get("sensor.count_tables")
|
||||
assert state.state == "0.005"
|
||||
assert (
|
||||
"Error rendering query SELECT {{ states.sensor.input1.state | int / 1000}} as value"
|
||||
" LIMIT 1;: ValueError: Template error: int got invalid input 'on' when rendering"
|
||||
" template 'SELECT {{ states.sensor.input1.state | int / 1000}} as value LIMIT 1;'"
|
||||
" but no default was specified" in caplog.text
|
||||
)
|
||||
|
||||
|
||||
async def test_query_limit(recorder_mock: Recorder, hass: HomeAssistant) -> None:
|
||||
"""Test the SQL sensor with a query containing 'LIMIT' in lowercase."""
|
||||
options = {
|
||||
@@ -720,14 +641,17 @@ async def test_query_recover_from_rollback(
|
||||
CONF_UNIQUE_ID: "very_unique_id",
|
||||
}
|
||||
await init_integration(hass, title="Select value SQL query", options=options)
|
||||
platforms = async_get_platforms(hass, "sql")
|
||||
sql_entity = platforms[0].entities["sensor.select_value_sql_query"]
|
||||
|
||||
state = hass.states.get("sensor.select_value_sql_query")
|
||||
assert state.state == "5"
|
||||
assert state.attributes["value"] == 5
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.sql.sensor.generate_lambda_stmt",
|
||||
return_value=generate_lambda_stmt("Faulty syntax create operational issue"),
|
||||
with patch.object(
|
||||
sql_entity,
|
||||
"_lambda_stmt",
|
||||
generate_lambda_stmt("Faulty syntax create operational issue"),
|
||||
):
|
||||
freezer.tick(timedelta(minutes=1))
|
||||
async_fire_time_changed(hass)
|
||||
|
||||
@@ -153,7 +153,7 @@ async def test_query_service_invalid_query_not_select(
|
||||
await async_setup_component(hass, DOMAIN, {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
with pytest.raises(vol.Invalid, match="SQL query must be of type SELECT"):
|
||||
with pytest.raises(vol.Invalid, match="Only SELECT queries allowed"):
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
SERVICE_QUERY,
|
||||
@@ -171,7 +171,7 @@ async def test_query_service_sqlalchemy_error(
|
||||
await async_setup_component(hass, DOMAIN, {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
with pytest.raises(MultipleInvalid, match="SQL query is empty or unknown type"):
|
||||
with pytest.raises(MultipleInvalid, match="Invalid SQL query"):
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
SERVICE_QUERY,
|
||||
|
||||
@@ -13,7 +13,6 @@ from homeassistant.components.sql.util import (
|
||||
validate_sql_select,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.template import Template
|
||||
|
||||
|
||||
async def test_resolve_db_url_when_none_configured(
|
||||
@@ -40,27 +39,27 @@ async def test_resolve_db_url_when_configured(hass: HomeAssistant) -> None:
|
||||
[
|
||||
(
|
||||
"DROP TABLE *",
|
||||
"SQL query must be of type SELECT",
|
||||
"Only SELECT queries allowed",
|
||||
),
|
||||
(
|
||||
"SELECT5 as value",
|
||||
"SQL query is empty or unknown type",
|
||||
"Invalid SQL query",
|
||||
),
|
||||
(
|
||||
";;",
|
||||
"SQL query is empty or unknown type",
|
||||
"Invalid SQL query",
|
||||
),
|
||||
(
|
||||
"UPDATE states SET state = 999999 WHERE state_id = 11125",
|
||||
"SQL query must be of type SELECT",
|
||||
"Only SELECT queries allowed",
|
||||
),
|
||||
(
|
||||
"WITH test AS (SELECT state FROM states) UPDATE states SET states.state = test.state;",
|
||||
"SQL query must be of type SELECT",
|
||||
"Only SELECT queries allowed",
|
||||
),
|
||||
(
|
||||
"SELECT 5 as value; UPDATE states SET state = 10;",
|
||||
"Multiple SQL statements are not allowed",
|
||||
"Multiple SQL queries are not supported",
|
||||
),
|
||||
],
|
||||
)
|
||||
@@ -71,7 +70,7 @@ async def test_invalid_sql_queries(
|
||||
) -> None:
|
||||
"""Test that various invalid or disallowed SQL queries raise the correct exception."""
|
||||
with pytest.raises(vol.Invalid, match=expected_error_message):
|
||||
validate_sql_select(Template(sql_query, hass))
|
||||
validate_sql_select(sql_query)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
|
||||
@@ -7,16 +7,7 @@ import socket
from unittest.mock import AsyncMock, Mock, NonCallableMock, patch

from psutil import NoSuchProcess, Process
from psutil._common import (
sbattery,
sdiskpart,
sdiskusage,
sfan,
shwtemp,
snetio,
snicaddr,
sswap,
)
from psutil._common import sdiskpart, sdiskusage, shwtemp, snetio, snicaddr, sswap
import pytest

from homeassistant.components.systemmonitor.const import DOMAIN
@@ -217,12 +208,6 @@ def mock_psutil(mock_process: list[MockProcess]) -> Generator:
|
||||
]
|
||||
mock_psutil.boot_time.return_value = 1708786800.0
|
||||
mock_psutil.NoSuchProcess = NoSuchProcess
|
||||
mock_psutil.sensors_fans.return_value = {
|
||||
"asus": [sfan("cpu-fan", 1200), sfan("another-fan", 1300)],
|
||||
}
|
||||
mock_psutil.sensors_battery.return_value = sbattery(
|
||||
percent=93, secsleft=16628, power_plugged=False
|
||||
)
|
||||
yield mock_psutil
|
||||
|
||||
|
||||
|
||||
@@ -1,13 +1,4 @@
|
||||
# serializer version: 1
|
||||
# name: test_binary_sensor[System Monitor Charging - attributes]
|
||||
ReadOnlyDict({
|
||||
'device_class': 'battery_charging',
|
||||
'friendly_name': 'System Monitor Charging',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensor[System Monitor Charging - state]
|
||||
'off'
|
||||
# ---
|
||||
# name: test_binary_sensor[System Monitor Process pip - attributes]
|
||||
ReadOnlyDict({
|
||||
'device_class': 'running',
|
||||
|
||||
@@ -8,7 +8,6 @@
|
||||
'eth1': "[snicaddr(family=<AddressFamily.AF_INET: 2>, address='192.168.10.1', netmask='255.255.255.0', broadcast='255.255.255.255', ptp=None)]",
|
||||
'vethxyzxyz': "[snicaddr(family=<AddressFamily.AF_INET: 2>, address='172.16.10.1', netmask='255.255.255.0', broadcast='255.255.255.255', ptp=None)]",
|
||||
}),
|
||||
'battery': 'sbattery(percent=93, secsleft=16628, power_plugged=False)',
|
||||
'boot_time': '2024-02-24 15:00:00+00:00',
|
||||
'cpu_percent': '10.0',
|
||||
'disk_usage': dict({
|
||||
@@ -16,10 +15,6 @@
|
||||
'/home/notexist/': 'sdiskusage(total=536870912000, used=322122547200, free=214748364800, percent=60.0)',
|
||||
'/media/share': 'sdiskusage(total=536870912000, used=322122547200, free=214748364800, percent=60.0)',
|
||||
}),
|
||||
'fan_speed': dict({
|
||||
'another-fan': '1300',
|
||||
'cpu-fan': '1200',
|
||||
}),
|
||||
'io_counters': dict({
|
||||
'eth0': 'snetio(bytes_sent=104857600, bytes_recv=104857600, packets_sent=50, packets_recv=50, errin=0, errout=0, dropin=0, dropout=0)',
|
||||
'eth1': 'snetio(bytes_sent=209715200, bytes_recv=209715200, packets_sent=150, packets_recv=150, errin=0, errout=0, dropin=0, dropout=0)',
|
||||
@@ -78,7 +73,6 @@
|
||||
'coordinators': dict({
|
||||
'data': dict({
|
||||
'addresses': None,
|
||||
'battery': 'sbattery(percent=93, secsleft=16628, power_plugged=False)',
|
||||
'boot_time': '2024-02-24 15:00:00+00:00',
|
||||
'cpu_percent': '10.0',
|
||||
'disk_usage': dict({
|
||||
@@ -86,10 +80,6 @@
|
||||
'/home/notexist/': 'sdiskusage(total=536870912000, used=322122547200, free=214748364800, percent=60.0)',
|
||||
'/media/share': 'sdiskusage(total=536870912000, used=322122547200, free=214748364800, percent=60.0)',
|
||||
}),
|
||||
'fan_speed': dict({
|
||||
'another-fan': '1300',
|
||||
'cpu-fan': '1200',
|
||||
}),
|
||||
'io_counters': None,
|
||||
'load': '(1, 2, 3)',
|
||||
'memory': 'VirtualMemory(total=104857600, available=41943040, percent=40.0, used=62914560, free=31457280)',
|
||||
|
||||
@@ -1,25 +1,4 @@
|
||||
# serializer version: 1
|
||||
# name: test_sensor[System Monitor Battery - attributes]
|
||||
ReadOnlyDict({
|
||||
'device_class': 'battery',
|
||||
'friendly_name': 'System Monitor Battery',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
'unit_of_measurement': '%',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor[System Monitor Battery - state]
|
||||
'93'
|
||||
# ---
|
||||
# name: test_sensor[System Monitor Battery empty - attributes]
|
||||
ReadOnlyDict({
|
||||
'device_class': 'timestamp',
|
||||
'friendly_name': 'System Monitor Battery empty',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor[System Monitor Battery empty - state]
|
||||
'2024-02-24T19:37:00+00:00'
|
||||
# ---
|
||||
# name: test_sensor[System Monitor Disk free / - attributes]
|
||||
ReadOnlyDict({
|
||||
'device_class': 'data_size',
|
||||
@@ -393,23 +372,3 @@
|
||||
# name: test_sensor[System Monitor Swap use - state]
|
||||
'60.0'
|
||||
# ---
|
||||
# name: test_sensor[System Monitor another-fan fan speed - attributes]
|
||||
ReadOnlyDict({
|
||||
'friendly_name': 'System Monitor another-fan fan speed',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
'unit_of_measurement': 'rpm',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor[System Monitor another-fan fan speed - state]
|
||||
'1300'
|
||||
# ---
|
||||
# name: test_sensor[System Monitor cpu-fan fan speed - attributes]
|
||||
ReadOnlyDict({
|
||||
'friendly_name': 'System Monitor cpu-fan fan speed',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
'unit_of_measurement': 'rpm',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor[System Monitor cpu-fan fan speed - state]
|
||||
'1200'
|
||||
# ---
|
||||
|
||||
@@ -23,7 +23,6 @@ from .conftest import MockProcess
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed
|
||||
|
||||
|
||||
@pytest.mark.freeze_time("2024-02-24 15:00:00", tz_offset=0)
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_sensor(
|
||||
hass: HomeAssistant,
|
||||
@@ -544,7 +543,7 @@ async def test_remove_obsolete_entities(
|
||||
mock_added_config_entry.entry_id
|
||||
)
|
||||
)
|
||||
== 44
|
||||
== 39
|
||||
)
|
||||
|
||||
entity_registry.async_update_entity(
|
||||
@@ -585,7 +584,7 @@ async def test_remove_obsolete_entities(
|
||||
mock_added_config_entry.entry_id
|
||||
)
|
||||
)
|
||||
== 45
|
||||
== 40
|
||||
)
|
||||
|
||||
assert (
|
||||
|
||||
@@ -93,7 +93,6 @@ async def test_reconfigure(hass: HomeAssistant) -> None:
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_update")
|
||||
async def test_options(hass: HomeAssistant) -> None:
|
||||
"""Test options flow."""
|
||||
entry = MockConfigEntry(
|
||||
|
||||
@@ -215,7 +215,6 @@ async def test_zeroconf_with_mac_device_exists_abort(
|
||||
assert result.get("reason") == "already_configured"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_wled")
|
||||
async def test_options_flow(
|
||||
hass: HomeAssistant, mock_config_entry: MockConfigEntry
|
||||
) -> None:
|
||||
|
||||
@@ -190,7 +190,6 @@ def test_run_json_flag_only() -> None:
|
||||
with (
|
||||
patch("builtins.print") as mock_print,
|
||||
patch.object(check_config, "check") as mock_check,
|
||||
patch("sys.argv", ["", "--json"]),
|
||||
):
|
||||
mock_check.return_value = {
|
||||
"except": {"domain1": ["error1", "error2"]},
|
||||
@@ -201,7 +200,7 @@ def test_run_json_flag_only() -> None:
|
||||
"yaml_files": {},
|
||||
}
|
||||
|
||||
exit_code = check_config.run(None)
|
||||
exit_code = check_config.run(["--json"])
|
||||
|
||||
# Should exit with code 1 (1 domain with errors)
|
||||
assert exit_code == 1
|
||||
@@ -234,10 +233,7 @@ def test_run_json_flag_only() -> None:
|
||||
def test_run_fail_on_warnings_flag_only() -> None:
|
||||
"""Test that --fail-on-warnings flag works independently."""
|
||||
# Test with warnings only
|
||||
with (
|
||||
patch.object(check_config, "check") as mock_check,
|
||||
patch("sys.argv", ["", "--fail-on-warnings"]),
|
||||
):
|
||||
with patch.object(check_config, "check") as mock_check:
|
||||
mock_check.return_value = {
|
||||
"except": {},
|
||||
"warn": {"light": ["warning message"]},
|
||||
@@ -247,7 +243,7 @@ def test_run_fail_on_warnings_flag_only() -> None:
|
||||
"yaml_files": {},
|
||||
}
|
||||
|
||||
exit_code = check_config.run(None)
|
||||
exit_code = check_config.run(["--fail-on-warnings"])
|
||||
assert exit_code == 1 # Should exit non-zero due to warnings
|
||||
|
||||
# Test with no warnings or errors
|
||||
@@ -286,7 +282,6 @@ def test_run_json_output_structure() -> None:
|
||||
with (
|
||||
patch("builtins.print") as mock_print,
|
||||
patch.object(check_config, "check") as mock_check,
|
||||
patch("sys.argv", ["", "--json", "--config", "/test/path"]),
|
||||
):
|
||||
mock_check.return_value = {
|
||||
"except": {"domain1": ["error1", {"config": "bad"}]},
|
||||
@@ -297,7 +292,7 @@ def test_run_json_output_structure() -> None:
|
||||
"yaml_files": {},
|
||||
}
|
||||
|
||||
exit_code = check_config.run(None)
|
||||
exit_code = check_config.run(["--json", "--config", "/test/path"])
|
||||
|
||||
json_output = mock_print.call_args[0][0]
|
||||
parsed_json = json.loads(json_output)
|
||||
@@ -418,11 +413,7 @@ def test_run_exit_code_logic() -> None:
|
||||
]
|
||||
|
||||
for errors, warnings, flags, expected_exit in test_cases:
|
||||
with (
|
||||
patch("builtins.print"),
|
||||
patch.object(check_config, "check") as mock_check,
|
||||
patch("sys.argv", ["", *flags]),
|
||||
):
|
||||
with patch("builtins.print"), patch.object(check_config, "check") as mock_check:
|
||||
mock_check.return_value = {
|
||||
"except": errors,
|
||||
"warn": warnings,
|
||||
@@ -432,7 +423,7 @@ def test_run_exit_code_logic() -> None:
|
||||
"yaml_files": {},
|
||||
}
|
||||
|
||||
exit_code = check_config.run(None)
|
||||
exit_code = check_config.run(flags)
|
||||
assert exit_code == expected_exit, (
|
||||
f"Failed for errors={errors}, warnings={warnings}, flags={flags}. "
|
||||
f"Expected {expected_exit}, got {exit_code}"
|
||||
@@ -456,7 +447,7 @@ def test_run_human_readable_still_works() -> None:
|
||||
"yaml_files": {},
|
||||
}
|
||||
|
||||
check_config.run(None)
|
||||
check_config.run([])
|
||||
|
||||
# Should print the "Testing configuration at" message
|
||||
printed_outputs = [
|
||||
@@ -472,11 +463,9 @@ def test_run_human_readable_still_works() -> None:
|
||||
|
||||
def test_run_with_config_path() -> None:
|
||||
"""Test that config path is correctly included in JSON output."""
|
||||
test_config_path = "/custom/config/path"
|
||||
with (
|
||||
patch("builtins.print") as mock_print,
|
||||
patch.object(check_config, "check") as mock_check,
|
||||
patch("sys.argv", ["", "--json", "--config", test_config_path]),
|
||||
):
|
||||
mock_check.return_value = {
|
||||
"except": {},
|
||||
@@ -487,7 +476,8 @@ def test_run_with_config_path() -> None:
|
||||
"yaml_files": {},
|
||||
}
|
||||
|
||||
check_config.run(None)
|
||||
test_config_path = "/custom/config/path"
|
||||
check_config.run(["--json", "--config", test_config_path])
|
||||
|
||||
json_output = mock_print.call_args[0][0]
|
||||
parsed_json = json.loads(json_output)
|
||||
@@ -505,7 +495,6 @@ def test_unknown_arguments_with_json() -> None:
|
||||
with (
|
||||
patch("builtins.print") as mock_print,
|
||||
patch.object(check_config, "check") as mock_check,
|
||||
patch("sys.argv", ["", "--json", "--unknown-flag", "value"]),
|
||||
):
|
||||
mock_check.return_value = {
|
||||
"except": {},
|
||||
@@ -516,7 +505,7 @@ def test_unknown_arguments_with_json() -> None:
|
||||
"yaml_files": {},
|
||||
}
|
||||
|
||||
check_config.run(None)
|
||||
check_config.run(["--json", "--unknown-flag", "value"])
|
||||
|
||||
# Should still print unknown argument warning AND JSON
|
||||
assert mock_print.call_count == 2
|
||||
@@ -539,7 +528,6 @@ def test_info_flag_with_json() -> None:
|
||||
with (
|
||||
patch("builtins.print") as mock_print,
|
||||
patch.object(check_config, "check") as mock_check,
|
||||
patch("sys.argv", ["", "--json", "--info", "light"]),
|
||||
):
|
||||
mock_check.return_value = {
|
||||
"except": {},
|
||||
@@ -551,7 +539,7 @@ def test_info_flag_with_json() -> None:
|
||||
}
|
||||
|
||||
# Test --json with --info - JSON should take precedence
|
||||
exit_code = check_config.run(None)
|
||||
exit_code = check_config.run(["--json", "--info", "light"])
|
||||
|
||||
assert exit_code == 0
|
||||
assert mock_print.call_count == 1
|
||||
@@ -576,7 +564,6 @@ def test_config_flag_variations() -> None:
|
||||
with (
|
||||
patch("builtins.print") as mock_print,
|
||||
patch.object(check_config, "check") as mock_check,
|
||||
patch("sys.argv", ["", *flags]),
|
||||
):
|
||||
mock_check.return_value = {
|
||||
"except": {},
|
||||
@@ -587,7 +574,7 @@ def test_config_flag_variations() -> None:
|
||||
"yaml_files": {},
|
||||
}
|
||||
|
||||
check_config.run(None)
|
||||
check_config.run(flags)
|
||||
|
||||
if "--json" in flags:
|
||||
json_output = json.loads(mock_print.call_args[0][0])
|
||||
@@ -600,10 +587,6 @@ def test_multiple_config_flags() -> None:
|
||||
with (
|
||||
patch("builtins.print") as mock_print,
|
||||
patch.object(check_config, "check") as mock_check,
|
||||
patch(
|
||||
"sys.argv",
|
||||
["", "--json", "--config", "/first/path", "--config", "/second/path"],
|
||||
),
|
||||
):
|
||||
mock_check.return_value = {
|
||||
"except": {},
|
||||
@@ -615,7 +598,9 @@ def test_multiple_config_flags() -> None:
|
||||
}
|
||||
|
||||
# Last config flag should win
|
||||
check_config.run(None)
|
||||
check_config.run(
|
||||
["--json", "--config", "/first/path", "--config", "/second/path"]
|
||||
)
|
||||
|
||||
json_output = json.loads(mock_print.call_args[0][0])
|
||||
expected_path = os.path.join(os.getcwd(), "/second/path")
|
||||
@@ -637,7 +622,6 @@ def test_fail_on_warnings_with_json_combinations() -> None:
|
||||
with (
|
||||
patch("builtins.print") as mock_print,
|
||||
patch.object(check_config, "check") as mock_check,
|
||||
patch("sys.argv", ["", "--json", "--fail-on-warnings"]),
|
||||
):
|
||||
mock_check.return_value = {
|
||||
"except": errors,
|
||||
@@ -648,7 +632,7 @@ def test_fail_on_warnings_with_json_combinations() -> None:
|
||||
"yaml_files": {},
|
||||
}
|
||||
|
||||
exit_code = check_config.run(None)
|
||||
exit_code = check_config.run(["--json", "--fail-on-warnings"])
|
||||
assert exit_code == expected_exit
|
||||
|
||||
# Should still output valid JSON

tests/test_const.py (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
"""Test const module."""
|
||||
|
||||
from enum import Enum
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant import const
|
||||
|
||||
from .common import help_test_all, import_and_test_deprecated_constant
|
||||
|
||||
|
||||
def _create_tuples(
|
||||
value: type[Enum] | list[Enum], constant_prefix: str
|
||||
) -> list[tuple[Enum, str]]:
|
||||
return [(enum, constant_prefix) for enum in value]
|
||||
|
||||
|
||||
def test_all() -> None:
|
||||
"""Test module.__all__ is correctly set."""
|
||||
help_test_all(const)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("replacement", "constant_name", "breaks_in_version"),
|
||||
[
|
||||
(const.UnitOfArea.SQUARE_METERS, "AREA_SQUARE_METERS", "2025.12"),
|
||||
],
|
||||
)
|
||||
def test_deprecated_constant_name_changes(
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
replacement: Enum,
|
||||
constant_name: str,
|
||||
breaks_in_version: str,
|
||||
) -> None:
|
||||
"""Test deprecated constants, where the name is not the same as the enum value."""
|
||||
import_and_test_deprecated_constant(
|
||||
caplog,
|
||||
const,
|
||||
constant_name,
|
||||
f"{replacement.__class__.__name__}.{replacement.name}",
|
||||
replacement,
|
||||
breaks_in_version,
|
||||
)
|
||||
@@ -1,9 +1,11 @@
"""Test the flow classes."""

import asyncio
from collections.abc import Callable
import dataclasses
import logging
from unittest.mock import Mock, patch
from typing import Any
from unittest.mock import AsyncMock, Mock, patch

import pytest
import voluptuous as vol
@@ -930,6 +932,329 @@ async def test_show_progress_fires_only_when_changed(
) # change (description placeholder)


@pytest.mark.parametrize(
|
||||
("task_side_effect", "flow_result"),
|
||||
[
|
||||
(None, data_entry_flow.FlowResultType.CREATE_ENTRY),
|
||||
(data_entry_flow.AbortFlow("fail"), data_entry_flow.FlowResultType.ABORT),
|
||||
],
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
("description", "expected_description"),
|
||||
[
|
||||
(None, None),
|
||||
({"title": "World"}, {"title": "World"}),
|
||||
(lambda x: {"title": "World"}, {"title": "World"}),
|
||||
],
|
||||
)
|
||||
async def test_progress_step(
|
||||
hass: HomeAssistant,
|
||||
manager: MockFlowManager,
|
||||
description: Callable[[data_entry_flow.FlowHandler], dict[str, Any]]
|
||||
| dict[str, Any]
|
||||
| None,
|
||||
expected_description: dict[str, Any] | None,
|
||||
task_side_effect: Exception | None,
|
||||
flow_result: data_entry_flow.FlowResultType,
|
||||
) -> None:
|
||||
"""Test progress_step decorator."""
|
||||
manager.hass = hass
|
||||
events = []
|
||||
task_init_evt = asyncio.Event()
|
||||
event_received_evt = asyncio.Event()
|
||||
task_result = Mock()
|
||||
task_result.side_effect = task_side_effect
|
||||
|
||||
@callback
|
||||
def capture_events(event: Event) -> None:
|
||||
events.append(event)
|
||||
event_received_evt.set()
|
||||
|
||||
@manager.mock_reg_handler("test")
|
||||
class TestFlow(data_entry_flow.FlowHandler):
|
||||
VERSION = 5
|
||||
|
||||
@data_entry_flow.progress_step(description_placeholders=description)
|
||||
async def async_step_init(self, user_input=None):
|
||||
await task_init_evt.wait()
|
||||
task_result()
|
||||
|
||||
return await self.async_step_finish()
|
||||
|
||||
async def async_step_finish(self, user_input=None):
|
||||
return self.async_create_entry(data={})
|
||||
|
||||
hass.bus.async_listen(
|
||||
data_entry_flow.EVENT_DATA_ENTRY_FLOW_PROGRESSED,
|
||||
capture_events,
|
||||
)
|
||||
|
||||
result = await manager.async_init("test")
|
||||
assert result["type"] == data_entry_flow.FlowResultType.SHOW_PROGRESS
|
||||
assert result["progress_action"] == "init"
|
||||
description_placeholders = result["description_placeholders"]
|
||||
assert description_placeholders == expected_description
|
||||
assert len(manager.async_progress()) == 1
|
||||
assert len(manager.async_progress_by_handler("test")) == 1
|
||||
assert manager.async_get(result["flow_id"])["handler"] == "test"
|
||||
|
||||
# Set task one done and wait for event
|
||||
task_init_evt.set()
|
||||
await event_received_evt.wait()
|
||||
event_received_evt.clear()
|
||||
assert len(events) == 1
|
||||
assert events[0].data == {
|
||||
"handler": "test",
|
||||
"flow_id": result["flow_id"],
|
||||
"refresh": True,
|
||||
}
|
||||
|
||||
# Frontend refreshes the flow
|
||||
result = await manager.async_configure(result["flow_id"])
|
||||
assert result["type"] == flow_result
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
(
|
||||
"task_init_side_effect", # side effect for initial step task
|
||||
"task_next_side_effect", # side effect for next step task
|
||||
"flow_result_before_init", # result before init task is done
|
||||
"flow_result_after_init", # result after init task is done
|
||||
"flow_result_after_next", # result after next task is done
|
||||
"flow_init_events", # number of events fired after init task is done
|
||||
"flow_next_events", # number of events fired after next task is done
|
||||
"manager_call_after_init", # lambda to continue the flow after init task
|
||||
"manager_call_after_next", # lambda to continue the flow after next task
|
||||
"before_init_task_side_effect", # function called before init event
|
||||
"before_next_task_side_effect", # function called before next event
|
||||
),
|
||||
[
|
||||
( # both steps show progress and complete successfully
|
||||
None,
|
||||
None,
|
||||
data_entry_flow.FlowResultType.SHOW_PROGRESS,
|
||||
data_entry_flow.FlowResultType.SHOW_PROGRESS,
|
||||
data_entry_flow.FlowResultType.CREATE_ENTRY,
|
||||
1,
|
||||
2,
|
||||
lambda manager, result: manager.async_configure(result["flow_id"]),
|
||||
lambda manager, result: manager.async_configure(result["flow_id"]),
|
||||
lambda received_event, init_task_event, next_task_event: None,
|
||||
lambda received_event, init_task_event, next_task_event: None,
|
||||
),
|
||||
( # first step aborts
|
||||
data_entry_flow.AbortFlow("fail"),
|
||||
None,
|
||||
data_entry_flow.FlowResultType.SHOW_PROGRESS,
|
||||
data_entry_flow.FlowResultType.ABORT,
|
||||
data_entry_flow.FlowResultType.ABORT,
|
||||
1,
|
||||
1,
|
||||
lambda manager, result: manager.async_configure(result["flow_id"]),
|
||||
lambda manager, result: AsyncMock(return_value=result)(),
|
||||
lambda received_event, init_task_event, next_task_event: None,
|
||||
lambda received_event, init_task_event, next_task_event: None,
|
||||
),
|
||||
( # first step shows progress, second step aborts
|
||||
None,
|
||||
data_entry_flow.AbortFlow("fail"),
|
||||
data_entry_flow.FlowResultType.SHOW_PROGRESS,
|
||||
data_entry_flow.FlowResultType.SHOW_PROGRESS,
|
||||
data_entry_flow.FlowResultType.ABORT,
|
||||
1,
|
||||
2,
|
||||
lambda manager, result: manager.async_configure(result["flow_id"]),
|
||||
lambda manager, result: manager.async_configure(result["flow_id"]),
|
||||
lambda received_event, init_task_event, next_task_event: None,
|
||||
lambda received_event, init_task_event, next_task_event: None,
|
||||
),
|
||||
( # first step task is already done, second step shows progress and completes
|
||||
None,
|
||||
None,
|
||||
data_entry_flow.FlowResultType.SHOW_PROGRESS_DONE,
|
||||
data_entry_flow.FlowResultType.SHOW_PROGRESS,
|
||||
data_entry_flow.FlowResultType.CREATE_ENTRY,
|
||||
0,
|
||||
1,
|
||||
lambda manager, result: manager.async_configure(result["flow_id"]),
|
||||
lambda manager, result: manager.async_configure(result["flow_id"]),
|
||||
lambda received_event,
|
||||
init_task_event,
|
||||
next_task_event: received_event.set() or init_task_event.set(),
|
||||
lambda received_event, init_task_event, next_task_event: None,
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_chaining_progress_steps(
    hass: HomeAssistant,
    manager: MockFlowManager,
    task_init_side_effect: Exception | None,
    task_next_side_effect: Exception | None,
    flow_result_before_init: data_entry_flow.FlowResultType,
    flow_result_after_init: data_entry_flow.FlowResultType,
    flow_result_after_next: data_entry_flow.FlowResultType,
    flow_init_events: int,
    flow_next_events: int,
    manager_call_after_init: Callable[
        [MockFlowManager, data_entry_flow.FlowResult], Any
    ],
    manager_call_after_next: Callable[
        [MockFlowManager, data_entry_flow.FlowResult], Any
    ],
    before_init_task_side_effect: Callable[
        [asyncio.Event, asyncio.Event, asyncio.Event], None
    ],
    before_next_task_side_effect: Callable[
        [asyncio.Event, asyncio.Event, asyncio.Event], None
    ],
) -> None:
    """Test chaining two steps with progress_step decorators."""
    manager.hass = hass
    events = []
    event_received_evt = asyncio.Event()
    task_init_evt = asyncio.Event()
    task_next_evt = asyncio.Event()
    task_init_result = Mock()
    task_init_result.side_effect = task_init_side_effect
    task_next_result = Mock()
    task_next_result.side_effect = task_next_side_effect

    @callback
    def capture_events(event: Event) -> None:
        events.append(event)
        event_received_evt.set()

    @manager.mock_reg_handler("test")
    class TestFlow(data_entry_flow.FlowHandler):
        VERSION = 5

        def async_remove(self) -> None:
            # Set the event-received event so the test can finish if the flow is aborted.
            event_received_evt.set()

        @data_entry_flow.progress_step()
        async def async_step_init(self, user_input=None):
            await task_init_evt.wait()
            task_init_result()

            return await self.async_step_next()

        @data_entry_flow.progress_step()
        async def async_step_next(self, user_input=None):
            await task_next_evt.wait()
            task_next_result()

            return await self.async_step_finish()

        async def async_step_finish(self, user_input=None):
            return self.async_create_entry(data={})

    hass.bus.async_listen(
        data_entry_flow.EVENT_DATA_ENTRY_FLOW_PROGRESSED,
        capture_events,
    )

    # Run side effect before first event is awaited
    before_init_task_side_effect(event_received_evt, task_init_evt, task_next_evt)

    result = await manager.async_init("test")
    assert result["type"] == flow_result_before_init
    assert len(manager.async_progress()) == 1
    assert len(manager.async_progress_by_handler("test")) == 1
    assert manager.async_get(result["flow_id"])["handler"] == "test"

    # Set task init done and wait for event
    task_init_evt.set()
    await event_received_evt.wait()
    event_received_evt.clear()
    assert len(events) == flow_init_events

    # Run side effect before second event is awaited
    before_next_task_side_effect(event_received_evt, task_init_evt, task_next_evt)

    # Continue the flow if needed.
    result = await manager_call_after_init(manager, result)
    assert result["type"] == flow_result_after_init

    # Set task next done and wait for event
    task_next_evt.set()
    await event_received_evt.wait()
    event_received_evt.clear()
    assert len(events) == flow_next_events

    # Continue the flow if needed.
    result = await manager_call_after_next(manager, result)
    assert result["type"] == flow_result_after_next

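# Illustrative sketch (not part of the original test module): the minimal shape of
# a flow handler using the progress_step() pattern exercised by the test above.
# The class name and the sleep stand-in are hypothetical; the sketch assumes only
# what the tests demonstrate: a decorated step awaits its long-running work, the
# manager reports SHOW_PROGRESS while the step runs, fires
# EVENT_DATA_ENTRY_FLOW_PROGRESSED when it finishes, and the flow is then resumed
# with async_configure(flow_id).
class _SketchProgressFlow(data_entry_flow.FlowHandler):
    VERSION = 1

    @data_entry_flow.progress_step()
    async def async_step_init(self, user_input=None):
        # Stand-in for real long-running work (discovery, pairing, ...).
        await asyncio.sleep(0)
        return await self.async_step_finish()

    async def async_step_finish(self, user_input=None):
        return self.async_create_entry(data={})
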
async def test_progress_step_result_reset(
    hass: HomeAssistant,
    manager: MockFlowManager,
) -> None:
    """Test progress_step decorator with reset result."""
    manager.hass = hass
    events = []
    task_init_evt = asyncio.Event()
    event_received_evt = asyncio.Event()

    @callback
    def capture_events(event: Event) -> None:
        events.append(event)
        event_received_evt.set()

    @manager.mock_reg_handler("test")
    class TestFlow(data_entry_flow.FlowHandler):
        VERSION = 5

        @data_entry_flow.progress_step()
        async def async_step_init(self, user_input=None):
            await task_init_evt.wait()

            return await self.async_step_finish()

        async def async_step_finish(self, user_input=None):
            if user_input is None:
                return self.async_show_form(step_id="finish")
            return self.async_create_entry(data={})

    hass.bus.async_listen(
        data_entry_flow.EVENT_DATA_ENTRY_FLOW_PROGRESSED,
        capture_events,
    )

    first_result = await manager.async_init("test")
    assert first_result["type"] == data_entry_flow.FlowResultType.SHOW_PROGRESS
    assert first_result["progress_action"] == "init"
    assert len(manager.async_progress()) == 1
    assert len(manager.async_progress_by_handler("test")) == 1
    assert manager.async_get(first_result["flow_id"])["handler"] == "test"

    # Set the init task done and wait for the event
    task_init_evt.set()
    await event_received_evt.wait()
    event_received_evt.clear()
    assert len(events) == 1
    assert events[0].data == {
        "handler": "test",
        "flow_id": first_result["flow_id"],
        "refresh": True,
    }

    # Frontend refreshes the flow
    result = await manager.async_configure(first_result["flow_id"])
    assert result["type"] == data_entry_flow.FlowResultType.FORM
    assert result["step_id"] == "finish"

    # Continue the flow again from the first result to test idempotency.
    result = await manager.async_configure(first_result["flow_id"])
    assert result["type"] == data_entry_flow.FlowResultType.FORM
    assert result["step_id"] == "finish"

    # Finish the flow
    result = await manager.async_configure(first_result["flow_id"], {})
    assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY

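# Illustrative sketch (not part of the original test module): how a caller can
# drive a progress_step flow the way the test above does by hand. The helper name
# is hypothetical; it relies only on behaviour the tests demonstrate:
# EVENT_DATA_ENTRY_FLOW_PROGRESSED fires once a progress step finishes, and
# async_configure(flow_id) then advances the flow to its next result.
async def _sketch_wait_and_continue(
    hass: HomeAssistant, manager: MockFlowManager, flow_id: str
) -> data_entry_flow.FlowResult:
    progressed = asyncio.Event()

    @callback
    def _on_progressed(event: Event) -> None:
        # A real caller would also check event.data["flow_id"] == flow_id.
        progressed.set()

    hass.bus.async_listen(
        data_entry_flow.EVENT_DATA_ENTRY_FLOW_PROGRESSED, _on_progressed
    )
    await progressed.wait()
    return await manager.async_configure(flow_id)
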
async def test_abort_flow_exception_step(manager: MockFlowManager) -> None:
    """Test that the AbortFlow exception works in a step."""