Mirror of https://github.com/home-assistant/core.git (synced 2025-09-23 03:49:31 +00:00)

Compare commits: add-includ ... trigger_ac
151 Commits
SHA1:
7d96a814f9 86dc453c55 a4f2c88c7f 3cdb894e61 cb837aaae5 82443ded34 71cc3b7fcd e5658f9747
868ded141f 1151fa698d 2796d6110a 844b97bd32 286b2500bd 4b7746ab51 ca1c366f4f de42ac14ac
7f7bd5a97f 8a70a1badb 181741cab6 1e14fb6dab 2b6a125927 e61ad10708 5177f9e8c2 850aeeb5eb
a1b9061060 0ec1f27489 befc93bc73 1526d953bf d38082a5c8 42850421d2 21a835c4b4 e9294dbf72
5c4dfbff1b abe628506d 12cc0ed18d 8ca7562390 942f7eebb1 1a167e6aee 9531ae10f2 bfc9616abf
054a5d751a a43ba4f966 1a5cae125f f3b9bda876 3f3aaa2815 6dc7870779 be83416c72 c745ee18eb
cf907ae196 8eee53036a b37237d24b 950e758b62 9cd940b7df 10b186a20d 757aec1c6b 0b159bdb9c
8728312e87 bbb67db354 265f5da21a 54859e8a83 c87dba878d 8d8e008123 b30667a469 8920c548d5
eac719f9af 71c274cb91 d4902361e6 f63eee3889 21bfe610d1 21c174e895 ec148e0459 286763b998
5f88122a2b 31968d16ab c125554817 10f2955d34 55712b784c fe3a929556 534801e80d 8aeda5a0c0
eb1cbbc75c fa8a4d7098 2623ebac4d 1746c51ce4 3499ed7a98 2c809d5903 40988198f3 ab5d1d27f1
1c10b85fed 91a7db08ff a764d54123 dc09e33556 14173bd9ec d2e7537629 9a165a64fe 9c749a6abc
2e33222c71 ab1c2c4f70 529219ae69 d6ce71fa61 e5b67d513a a547179f66 8c61788a7d 6b934d94db
d30ad82774 4618b33e93 d6299094db 087d9d30c0 f07890cf5c e5b78cc481 12b409d8e1 def5408db8
f105b45ee2 9d904c30a7 99b047939f 3a615908ee baff541f46 6d8c35cfe9 b8d9883e74 c3c65af450
3af8616764 64ec4609c5 c78bc26b83 0c093646c9 1b27acdde0 9dafc0e02f 0091dafcb0 b387acffb7
36b3133fa2 fe01e96012 0b56ec16ed ca79f4c963 9a43f2776d 0cda883b56 ae58e633f0 06480bfd9d
625f586945 7dbeaa475d dff3d5f8af 89c335919a 2bb4573357 7037ce989c bfdd2053ba fcc3f92f8c
8710267d53 85b6adcc9a beec6e86e0 3dacffaaf9 d90f2a1de1 b6c9217429 7fc8da6769
@@ -142,6 +142,7 @@ homeassistant.components.cloud.*
 homeassistant.components.co2signal.*
 homeassistant.components.comelit.*
 homeassistant.components.command_line.*
+homeassistant.components.compit.*
 homeassistant.components.config.*
 homeassistant.components.configurator.*
 homeassistant.components.cookidoo.*
CODEOWNERS (generated, 8 changed lines)
@@ -292,6 +292,8 @@ build.json @home-assistant/supervisor
 /tests/components/command_line/ @gjohansson-ST
 /homeassistant/components/compensation/ @Petro31
 /tests/components/compensation/ @Petro31
+/homeassistant/components/compit/ @Przemko92
+/tests/components/compit/ @Przemko92
 /homeassistant/components/config/ @home-assistant/core
 /tests/components/config/ @home-assistant/core
 /homeassistant/components/configurator/ @home-assistant/core
@@ -770,6 +772,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/iqvia/ @bachya
 /tests/components/iqvia/ @bachya
 /homeassistant/components/irish_rail_transport/ @ttroy50
+/homeassistant/components/irm_kmi/ @jdejaegh
+/tests/components/irm_kmi/ @jdejaegh
 /homeassistant/components/iron_os/ @tr4nt0r
 /tests/components/iron_os/ @tr4nt0r
 /homeassistant/components/isal/ @bdraco
@@ -1727,8 +1731,8 @@ build.json @home-assistant/supervisor
 /tests/components/volumio/ @OnFreund
 /homeassistant/components/volvo/ @thomasddn
 /tests/components/volvo/ @thomasddn
-/homeassistant/components/volvooncall/ @molobrakos
-/tests/components/volvooncall/ @molobrakos
+/homeassistant/components/volvooncall/ @molobrakos @svrooij
+/tests/components/volvooncall/ @molobrakos @svrooij
 /homeassistant/components/wake_on_lan/ @ntilley905
 /tests/components/wake_on_lan/ @ntilley905
 /homeassistant/components/wake_word/ @home-assistant/core @synesthesiam
@@ -109,7 +109,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
         )

         state = await self.async_get_last_state()
-        if state is not None and state.state in self.options:
+        if (state is not None) and (state.state in self.options):
             self._attr_current_option = state.state

         if self.registry_entry and (device_id := self.registry_entry.device_id):
@@ -119,7 +119,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):

             def cleanup() -> None:
                 """Clean up registered device."""
-                pipeline_data.pipeline_devices.pop(device_id)
+                pipeline_data.pipeline_devices.pop(device_id, None)

             self.async_on_remove(cleanup)
@@ -2,13 +2,12 @@

 from __future__ import annotations

-from collections.abc import Callable, Coroutine
 import logging
 from typing import Any

 from aiohttp import ClientResponseError
-from yalexs.activity import ActivityType, ActivityTypes
-from yalexs.lock import Lock, LockStatus
+from yalexs.activity import ActivityType
+from yalexs.lock import Lock, LockOperation, LockStatus
 from yalexs.util import get_latest_activity, update_lock_detail_from_activity

 from homeassistant.components.lock import ATTR_CHANGED_BY, LockEntity, LockEntityFeature
@@ -50,30 +49,25 @@ class AugustLock(AugustEntity, RestoreEntity, LockEntity):

     async def async_lock(self, **kwargs: Any) -> None:
         """Lock the device."""
-        if self._data.push_updates_connected:
-            await self._data.async_lock_async(self._device_id, self._hyper_bridge)
-            return
-        await self._call_lock_operation(self._data.async_lock)
+        await self._perform_lock_operation(LockOperation.LOCK)

     async def async_open(self, **kwargs: Any) -> None:
         """Open/unlatch the device."""
-        if self._data.push_updates_connected:
-            await self._data.async_unlatch_async(self._device_id, self._hyper_bridge)
-            return
-        await self._call_lock_operation(self._data.async_unlatch)
+        await self._perform_lock_operation(LockOperation.OPEN)

     async def async_unlock(self, **kwargs: Any) -> None:
         """Unlock the device."""
-        if self._data.push_updates_connected:
-            await self._data.async_unlock_async(self._device_id, self._hyper_bridge)
-            return
-        await self._call_lock_operation(self._data.async_unlock)
+        await self._perform_lock_operation(LockOperation.UNLOCK)

-    async def _call_lock_operation(
-        self, lock_operation: Callable[[str], Coroutine[Any, Any, list[ActivityTypes]]]
-    ) -> None:
+    async def _perform_lock_operation(self, operation: LockOperation) -> None:
         """Perform a lock operation."""
         try:
-            activities = await lock_operation(self._device_id)
+            activities = await self._data.async_operate_lock(
+                self._device_id,
+                operation,
+                self._data.push_updates_connected,
+                self._hyper_bridge,
+            )
         except ClientResponseError as err:
             if err.status == LOCK_JAMMED_ERR:
                 self._detail.lock_status = LockStatus.JAMMED
homeassistant/components/compit/__init__.py (new file, 45 lines)
@@ -0,0 +1,45 @@
"""The Compit integration."""

from compit_inext_api import CannotConnect, CompitApiConnector, InvalidAuth

from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator

PLATFORMS = [
    Platform.CLIMATE,
]


async def async_setup_entry(hass: HomeAssistant, entry: CompitConfigEntry) -> bool:
    """Set up Compit from a config entry."""

    session = async_get_clientsession(hass)
    connector = CompitApiConnector(session)
    try:
        connected = await connector.init(
            entry.data[CONF_EMAIL], entry.data[CONF_PASSWORD], hass.config.language
        )
    except CannotConnect as e:
        raise ConfigEntryNotReady(f"Error while connecting to Compit: {e}") from e
    except InvalidAuth as e:
        raise ConfigEntryAuthFailed(
            f"Invalid credentials for {entry.data[CONF_EMAIL]}"
        ) from e

    if not connected:
        raise ConfigEntryAuthFailed("Authentication API error")

    coordinator = CompitDataUpdateCoordinator(hass, entry, connector)
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: CompitConfigEntry) -> bool:
    """Unload an entry for the Compit integration."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
homeassistant/components/compit/climate.py (new file, 264 lines)
@@ -0,0 +1,264 @@
"""Module contains the CompitClimate class for controlling climate entities."""

import logging
from typing import Any

from compit_inext_api import Param, Parameter
from compit_inext_api.consts import (
    CompitFanMode,
    CompitHVACMode,
    CompitParameter,
    CompitPresetMode,
)
from propcache.api import cached_property

from homeassistant.components.climate import (
    FAN_AUTO,
    FAN_HIGH,
    FAN_LOW,
    FAN_MEDIUM,
    FAN_OFF,
    PRESET_AWAY,
    PRESET_ECO,
    PRESET_HOME,
    PRESET_NONE,
    ClimateEntity,
    ClimateEntityFeature,
    HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN, MANUFACTURER_NAME
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator

_LOGGER: logging.Logger = logging.getLogger(__name__)

# Device class for climate devices in Compit system
CLIMATE_DEVICE_CLASS = 10
PARALLEL_UPDATES = 0

COMPIT_MODE_MAP = {
    CompitHVACMode.COOL: HVACMode.COOL,
    CompitHVACMode.HEAT: HVACMode.HEAT,
    CompitHVACMode.OFF: HVACMode.OFF,
}

COMPIT_FANSPEED_MAP = {
    CompitFanMode.OFF: FAN_OFF,
    CompitFanMode.AUTO: FAN_AUTO,
    CompitFanMode.LOW: FAN_LOW,
    CompitFanMode.MEDIUM: FAN_MEDIUM,
    CompitFanMode.HIGH: FAN_HIGH,
    CompitFanMode.HOLIDAY: FAN_AUTO,
}

COMPIT_PRESET_MAP = {
    CompitPresetMode.AUTO: PRESET_HOME,
    CompitPresetMode.HOLIDAY: PRESET_ECO,
    CompitPresetMode.MANUAL: PRESET_NONE,
    CompitPresetMode.AWAY: PRESET_AWAY,
}

HVAC_MODE_TO_COMPIT_MODE = {v: k for k, v in COMPIT_MODE_MAP.items()}
FAN_MODE_TO_COMPIT_FAN_MODE = {v: k for k, v in COMPIT_FANSPEED_MAP.items()}
PRESET_MODE_TO_COMPIT_PRESET_MODE = {v: k for k, v in COMPIT_PRESET_MAP.items()}


async def async_setup_entry(
    hass: HomeAssistant,
    entry: CompitConfigEntry,
    async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the CompitClimate platform from a config entry."""

    coordinator = entry.runtime_data
    climate_entities = []
    for device_id in coordinator.connector.devices:
        device = coordinator.connector.devices[device_id]

        if device.definition.device_class == CLIMATE_DEVICE_CLASS:
            climate_entities.append(
                CompitClimate(
                    coordinator,
                    device_id,
                    {
                        parameter.parameter_code: parameter
                        for parameter in device.definition.parameters
                    },
                    device.definition.name,
                )
            )

    async_add_devices(climate_entities)


class CompitClimate(CoordinatorEntity[CompitDataUpdateCoordinator], ClimateEntity):
    """Representation of a Compit climate device."""

    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _attr_hvac_modes = [*COMPIT_MODE_MAP.values()]
    _attr_name = None
    _attr_has_entity_name = True
    _attr_supported_features = (
        ClimateEntityFeature.TARGET_TEMPERATURE
        | ClimateEntityFeature.FAN_MODE
        | ClimateEntityFeature.PRESET_MODE
    )

    def __init__(
        self,
        coordinator: CompitDataUpdateCoordinator,
        device_id: int,
        parameters: dict[str, Parameter],
        device_name: str,
    ) -> None:
        """Initialize the climate device."""
        super().__init__(coordinator)
        self._attr_unique_id = f"{device_name}_{device_id}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, str(device_id))},
            name=device_name,
            manufacturer=MANUFACTURER_NAME,
            model=device_name,
        )

        self.parameters = parameters
        self.device_id = device_id
        self.available_presets: Parameter | None = self.parameters.get(
            CompitParameter.PRESET_MODE.value
        )
        self.available_fan_modes: Parameter | None = self.parameters.get(
            CompitParameter.FAN_MODE.value
        )

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        return (
            super().available and self.device_id in self.coordinator.connector.devices
        )

    @property
    def current_temperature(self) -> float | None:
        """Return the current temperature."""
        value = self.get_parameter_value(CompitParameter.CURRENT_TEMPERATURE)
        if value is None:
            return None
        return float(value.value)

    @property
    def target_temperature(self) -> float | None:
        """Return the temperature we try to reach."""
        value = self.get_parameter_value(CompitParameter.SET_TARGET_TEMPERATURE)
        if value is None:
            return None
        return float(value.value)

    @cached_property
    def preset_modes(self) -> list[str] | None:
        """Return the available preset modes."""
        if self.available_presets is None or self.available_presets.details is None:
            return []

        preset_modes = []
        for item in self.available_presets.details:
            if item is not None:
                ha_preset = COMPIT_PRESET_MAP.get(CompitPresetMode(item.state))
                if ha_preset and ha_preset not in preset_modes:
                    preset_modes.append(ha_preset)

        return preset_modes

    @cached_property
    def fan_modes(self) -> list[str] | None:
        """Return the available fan modes."""
        if self.available_fan_modes is None or self.available_fan_modes.details is None:
            return []

        fan_modes = []
        for item in self.available_fan_modes.details:
            if item is not None:
                ha_fan_mode = COMPIT_FANSPEED_MAP.get(CompitFanMode(item.state))
                if ha_fan_mode and ha_fan_mode not in fan_modes:
                    fan_modes.append(ha_fan_mode)

        return fan_modes

    @property
    def preset_mode(self) -> str | None:
        """Return the current preset mode."""
        preset_mode = self.get_parameter_value(CompitParameter.PRESET_MODE)

        if preset_mode:
            compit_preset_mode = CompitPresetMode(preset_mode.value)
            return COMPIT_PRESET_MAP.get(compit_preset_mode)
        return None

    @property
    def fan_mode(self) -> str | None:
        """Return the current fan mode."""
        fan_mode = self.get_parameter_value(CompitParameter.FAN_MODE)
        if fan_mode:
            compit_fan_mode = CompitFanMode(fan_mode.value)
            return COMPIT_FANSPEED_MAP.get(compit_fan_mode)
        return None

    @property
    def hvac_mode(self) -> HVACMode | None:
        """Return the current HVAC mode."""
        hvac_mode = self.get_parameter_value(CompitParameter.HVAC_MODE)
        if hvac_mode:
            compit_hvac_mode = CompitHVACMode(hvac_mode.value)
            return COMPIT_MODE_MAP.get(compit_hvac_mode)
        return None

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set new target temperature."""
        temp = kwargs.get(ATTR_TEMPERATURE)
        if temp is None:
            raise ServiceValidationError("Temperature argument missing")
        await self.set_parameter_value(CompitParameter.SET_TARGET_TEMPERATURE, temp)

    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set new target HVAC mode."""

        if not (mode := HVAC_MODE_TO_COMPIT_MODE.get(hvac_mode)):
            raise ServiceValidationError(f"Invalid hvac mode {hvac_mode}")

        await self.set_parameter_value(CompitParameter.HVAC_MODE, mode.value)

    async def async_set_preset_mode(self, preset_mode: str) -> None:
        """Set new target preset mode."""

        compit_preset = PRESET_MODE_TO_COMPIT_PRESET_MODE.get(preset_mode)
        if compit_preset is None:
            raise ServiceValidationError(f"Invalid preset mode: {preset_mode}")

        await self.set_parameter_value(CompitParameter.PRESET_MODE, compit_preset.value)

    async def async_set_fan_mode(self, fan_mode: str) -> None:
        """Set new target fan mode."""

        compit_fan_mode = FAN_MODE_TO_COMPIT_FAN_MODE.get(fan_mode)
        if compit_fan_mode is None:
            raise ServiceValidationError(f"Invalid fan mode: {fan_mode}")

        await self.set_parameter_value(CompitParameter.FAN_MODE, compit_fan_mode.value)

    async def set_parameter_value(self, parameter: CompitParameter, value: int) -> None:
        """Call the API to set a parameter to a new value."""
        await self.coordinator.connector.set_device_parameter(
            self.device_id, parameter, value
        )
        self.async_write_ha_state()

    def get_parameter_value(self, parameter: CompitParameter) -> Param | None:
        """Get the parameter value from the device state."""
        return self.coordinator.connector.get_device_parameter(
            self.device_id, parameter
        )
homeassistant/components/compit/config_flow.py (new file, 110 lines)
@@ -0,0 +1,110 @@
"""Config flow for Compit integration."""

from __future__ import annotations

from collections.abc import Mapping
import logging
from typing import Any

from compit_inext_api import CannotConnect, CompitApiConnector, InvalidAuth
import voluptuous as vol

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_create_clientsession

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_EMAIL): str,
        vol.Required(CONF_PASSWORD): str,
    }
)

STEP_REAUTH_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_PASSWORD): str,
    }
)


class CompitConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Compit."""

    VERSION = 1

    async def async_step_user(
        self,
        user_input: dict[str, Any] | None = None,
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}
        if user_input is not None:
            session = async_create_clientsession(self.hass)
            api = CompitApiConnector(session)
            success = False
            try:
                success = await api.init(
                    user_input[CONF_EMAIL],
                    user_input[CONF_PASSWORD],
                    self.hass.config.language,
                )
            except CannotConnect:
                errors["base"] = "cannot_connect"
            except InvalidAuth:
                errors["base"] = "invalid_auth"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                if not success:
                    # Api returned unexpected result but no exception
                    _LOGGER.error("Compit api returned unexpected result")
                    errors["base"] = "unknown"
                else:
                    await self.async_set_unique_id(user_input[CONF_EMAIL])

                    if self.source == SOURCE_REAUTH:
                        self._abort_if_unique_id_mismatch()
                        return self.async_update_reload_and_abort(
                            self._get_reauth_entry(), data_updates=user_input
                        )
                    self._abort_if_unique_id_configured()
                    return self.async_create_entry(
                        title=user_input[CONF_EMAIL], data=user_input
                    )

        return self.async_show_form(
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )

    async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult:
        """Handle re-auth."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm re-authentication."""
        errors: dict[str, str] = {}
        reauth_entry = self._get_reauth_entry()
        reauth_entry_data = reauth_entry.data

        if user_input:
            # Reuse async_step_user with combined credentials
            return await self.async_step_user(
                {
                    CONF_EMAIL: reauth_entry_data[CONF_EMAIL],
                    CONF_PASSWORD: user_input[CONF_PASSWORD],
                }
            )

        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=STEP_REAUTH_SCHEMA,
            description_placeholders={CONF_EMAIL: reauth_entry_data[CONF_EMAIL]},
            errors=errors,
        )
homeassistant/components/compit/const.py (new file, 4 lines)
@@ -0,0 +1,4 @@
"""Constants for the Compit integration."""

DOMAIN = "compit"
MANUFACTURER_NAME = "Compit"
homeassistant/components/compit/coordinator.py (new file, 43 lines)
@@ -0,0 +1,43 @@
"""Define an object to manage fetching Compit data."""

from datetime import timedelta
import logging

from compit_inext_api import CompitApiConnector, DeviceInstance

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN

SCAN_INTERVAL = timedelta(seconds=30)
_LOGGER: logging.Logger = logging.getLogger(__name__)

type CompitConfigEntry = ConfigEntry[CompitDataUpdateCoordinator]


class CompitDataUpdateCoordinator(DataUpdateCoordinator[dict[int, DeviceInstance]]):
    """Class to manage fetching data from the API."""

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: ConfigEntry,
        connector: CompitApiConnector,
    ) -> None:
        """Initialize."""
        self.connector = connector

        super().__init__(
            hass,
            _LOGGER,
            name=DOMAIN,
            update_interval=SCAN_INTERVAL,
            config_entry=config_entry,
        )

    async def _async_update_data(self) -> dict[int, DeviceInstance]:
        """Update data via library."""
        await self.connector.update_state(device_id=None)  # Update all devices
        return self.connector.devices
homeassistant/components/compit/manifest.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "domain": "compit",
  "name": "Compit",
  "codeowners": ["@Przemko92"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/compit",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["compit"],
  "quality_scale": "bronze",
  "requirements": ["compit-inext-api==0.2.1"]
}
homeassistant/components/compit/quality_scale.yaml (new file, 86 lines)
@@ -0,0 +1,86 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  appropriate-polling: done
  brands: done
  common-modules:
    status: exempt
    comment: |
      This integration does not use any common modules.
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
      Entities of this integration does not explicitly subscribe to events.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: |
      This integration does not have an options flow.
  docs-installation-parameters: done
  entity-unavailable: todo
  integration-owner: done
  log-when-unavailable: todo
  parallel-updates: done
  reauthentication-flow: done
  test-coverage: todo

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: |
      This integration is a cloud service and does not support discovery.
  discovery: todo
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: done
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices: todo
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default:
    status: exempt
    comment: |
      This integration does not have any entities that should disabled by default.
  entity-translations: done
  exception-translations: todo
  icon-translations:
    status: exempt
    comment: |
      There is no need for icon translations.
  reconfiguration-flow: todo
  repair-issues: todo
  stale-devices: todo
  # Platinum
  async-dependency: done
  inject-websession: todo
  strict-typing: done
homeassistant/components/compit/strings.json (new file, 35 lines)
@@ -0,0 +1,35 @@
{
  "config": {
    "step": {
      "user": {
        "description": "Please enter your https://inext.compit.pl/ credentials.",
        "title": "Connect to Compit iNext",
        "data": {
          "email": "[%key:common::config_flow::data::email%]",
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "email": "The email address of your inext.compit.pl account",
          "password": "The password of your inext.compit.pl account"
        }
      },
      "reauth_confirm": {
        "description": "Please update your password for {email}",
        "data": {
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "password": "[%key:component::compit::config::step::user::data_description::password%]"
        }
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "abort": {
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
    }
  }
}
@@ -253,6 +253,7 @@ ECOWITT_SENSORS_MAPPING: Final = {
     ),
     EcoWittSensorTypes.PM4: SensorEntityDescription(
         key="PM4",
+        device_class=SensorDeviceClass.PM4,
         native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         state_class=SensorStateClass.MEASUREMENT,
     ),
@@ -17,7 +17,7 @@
   "mqtt": ["esphome/discover/#"],
   "quality_scale": "platinum",
   "requirements": [
-    "aioesphomeapi==41.1.0",
+    "aioesphomeapi==41.5.0",
     "esphome-dashboard-api==1.3.0",
     "bleak-esphome==3.3.0"
   ],
@@ -162,12 +162,12 @@ def setup_service_functions(
     It appears that all TCC-compatible systems support the same three zones modes.
     """

-    @verify_domain_control(hass, DOMAIN)
+    @verify_domain_control(DOMAIN)
     async def force_refresh(call: ServiceCall) -> None:
         """Obtain the latest state data via the vendor's RESTful API."""
         await coordinator.async_refresh()

-    @verify_domain_control(hass, DOMAIN)
+    @verify_domain_control(DOMAIN)
     async def set_system_mode(call: ServiceCall) -> None:
         """Set the system mode."""
         assert coordinator.tcs is not None  # mypy
@@ -179,7 +179,7 @@ def setup_service_functions(
         }
         async_dispatcher_send(hass, DOMAIN, payload)

-    @verify_domain_control(hass, DOMAIN)
+    @verify_domain_control(DOMAIN)
     async def set_zone_override(call: ServiceCall) -> None:
         """Set the zone override (setpoint)."""
         entity_id = call.data[ATTR_ENTITY_ID]
@@ -124,7 +124,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: GeniusHubConfigEntry) ->
 def setup_service_functions(hass: HomeAssistant, broker):
     """Set up the service functions."""

-    @verify_domain_control(hass, DOMAIN)
+    @verify_domain_control(DOMAIN)
     async def set_zone_mode(call: ServiceCall) -> None:
         """Set the system mode."""
         entity_id = call.data[ATTR_ENTITY_ID]
@@ -29,6 +29,7 @@ from homeassistant.helpers import (
     config_validation as cv,
     device_registry as dr,
     entity_registry as er,
+    issue_registry as ir,
 )
 from homeassistant.helpers.typing import ConfigType

@@ -70,6 +71,21 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

     async def generate_content(call: ServiceCall) -> ServiceResponse:
         """Generate content from text and optionally images."""
+        LOGGER.warning(
+            "Action '%s.%s' is deprecated and will be removed in the 2026.4.0 release. "
+            "Please use the 'ai_task.generate_data' action instead",
+            DOMAIN,
+            SERVICE_GENERATE_CONTENT,
+        )
+        ir.async_create_issue(
+            hass,
+            DOMAIN,
+            "deprecated_generate_content",
+            breaks_in_ha_version="2026.4.0",
+            is_fixable=False,
+            severity=ir.IssueSeverity.WARNING,
+            translation_key="deprecated_generate_content",
+        )

         prompt_parts = [call.data[CONF_PROMPT]]

@@ -150,10 +150,16 @@
       }
     }
   },
+  "issues": {
+    "deprecated_generate_content": {
+      "title": "Deprecated 'generate_content' action",
+      "description": "Action 'google_generative_ai_conversation.generate_content' is deprecated and will be removed in the 2026.4.0 release. Please use the 'ai_task.generate_data' action instead"
+    }
+  },
   "services": {
     "generate_content": {
-      "name": "Generate content",
-      "description": "Generate content from a prompt consisting of text and optionally images",
+      "name": "Generate content (deprecated)",
+      "description": "Generate content from a prompt consisting of text and optionally images (deprecated)",
       "fields": {
         "prompt": {
           "name": "Prompt",
@@ -10,9 +10,8 @@ from typing import Self, cast
 from google_photos_library_api.exceptions import GooglePhotosApiError
 from google_photos_library_api.model import Album, MediaItem

-from homeassistant.components.media_player import MediaClass, MediaType
+from homeassistant.components.media_player import BrowseError, MediaClass, MediaType
 from homeassistant.components.media_source import (
-    BrowseError,
     BrowseMediaSource,
     MediaSource,
     MediaSourceItem,
@@ -37,7 +37,6 @@ PLATFORMS = [
     Platform.SELECT,
     Platform.SENSOR,
     Platform.SWITCH,
-    Platform.TIME,
 ]


@@ -66,6 +66,14 @@
         "default": "mdi:stop"
       }
     },
+    "number": {
+      "start_in_relative": {
+        "default": "mdi:progress-clock"
+      },
+      "finish_in_relative": {
+        "default": "mdi:progress-clock"
+      }
+    },
     "sensor": {
       "operation_state": {
         "default": "mdi:state-machine",
@@ -251,14 +259,6 @@
       "i_dos_2_active": {
         "default": "mdi:numeric-2-circle"
       }
     },
-    "time": {
-      "start_in_relative": {
-        "default": "mdi:progress-clock"
-      },
-      "finish_in_relative": {
-        "default": "mdi:progress-clock"
-      }
-    }
   }
 }
@@ -1852,11 +1852,6 @@
       "i_dos2_active": {
         "name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_i_dos2_active::name%]"
       }
     },
-    "time": {
-      "alarm_clock": {
-        "name": "Alarm clock"
-      }
-    }
   }
 }
@@ -1,172 +0,0 @@
"""Provides time entities for Home Connect."""

from datetime import time
from typing import cast

from aiohomeconnect.model import SettingKey
from aiohomeconnect.model.error import HomeConnectError

from homeassistant.components.automation import automations_with_entity
from homeassistant.components.script import scripts_with_entity
from homeassistant.components.time import TimeEntity, TimeEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.issue_registry import (
    IssueSeverity,
    async_create_issue,
    async_delete_issue,
)

from .common import setup_home_connect_entry
from .const import DOMAIN
from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
from .entity import HomeConnectEntity
from .utils import get_dict_from_home_connect_error

PARALLEL_UPDATES = 1

TIME_ENTITIES = (
    TimeEntityDescription(
        key=SettingKey.BSH_COMMON_ALARM_CLOCK,
        translation_key="alarm_clock",
        entity_registry_enabled_default=False,
    ),
)


def _get_entities_for_appliance(
    entry: HomeConnectConfigEntry,
    appliance: HomeConnectApplianceData,
) -> list[HomeConnectEntity]:
    """Get a list of entities."""
    return [
        HomeConnectTimeEntity(entry.runtime_data, appliance, description)
        for description in TIME_ENTITIES
        if description.key in appliance.settings
    ]


async def async_setup_entry(
    hass: HomeAssistant,
    entry: HomeConnectConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Home Connect switch."""
    setup_home_connect_entry(
        entry,
        _get_entities_for_appliance,
        async_add_entities,
    )


def seconds_to_time(seconds: int) -> time:
    """Convert seconds to a time object."""
    minutes, sec = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    return time(hour=hours, minute=minutes, second=sec)


def time_to_seconds(t: time) -> int:
    """Convert a time object to seconds."""
    return t.hour * 3600 + t.minute * 60 + t.second


class HomeConnectTimeEntity(HomeConnectEntity, TimeEntity):
    """Time setting class for Home Connect."""

    async def async_added_to_hass(self) -> None:
        """Call when entity is added to hass."""
        await super().async_added_to_hass()
        if self.bsh_key is SettingKey.BSH_COMMON_ALARM_CLOCK:
            automations = automations_with_entity(self.hass, self.entity_id)
            scripts = scripts_with_entity(self.hass, self.entity_id)
            items = automations + scripts
            if not items:
                return

            entity_reg: er.EntityRegistry = er.async_get(self.hass)
            entity_automations = [
                automation_entity
                for automation_id in automations
                if (automation_entity := entity_reg.async_get(automation_id))
            ]
            entity_scripts = [
                script_entity
                for script_id in scripts
                if (script_entity := entity_reg.async_get(script_id))
            ]

            items_list = [
                f"- [{item.original_name}](/config/automation/edit/{item.unique_id})"
                for item in entity_automations
            ] + [
                f"- [{item.original_name}](/config/script/edit/{item.unique_id})"
                for item in entity_scripts
            ]

            async_create_issue(
                self.hass,
                DOMAIN,
                f"deprecated_time_alarm_clock_in_automations_scripts_{self.entity_id}",
                breaks_in_ha_version="2025.10.0",
                is_fixable=True,
                is_persistent=True,
                severity=IssueSeverity.WARNING,
                translation_key="deprecated_time_alarm_clock",
                translation_placeholders={
                    "entity_id": self.entity_id,
                    "items": "\n".join(items_list),
                },
            )

    async def async_will_remove_from_hass(self) -> None:
        """Call when entity will be removed from hass."""
        if self.bsh_key is SettingKey.BSH_COMMON_ALARM_CLOCK:
            async_delete_issue(
                self.hass,
                DOMAIN,
                f"deprecated_time_alarm_clock_in_automations_scripts_{self.entity_id}",
            )
            async_delete_issue(
                self.hass, DOMAIN, f"deprecated_time_alarm_clock_{self.entity_id}"
            )

    async def async_set_value(self, value: time) -> None:
        """Set the native value of the entity."""
        async_create_issue(
            self.hass,
            DOMAIN,
            f"deprecated_time_alarm_clock_{self.entity_id}",
            breaks_in_ha_version="2025.10.0",
            is_fixable=True,
            is_persistent=True,
            severity=IssueSeverity.WARNING,
            translation_key="deprecated_time_alarm_clock",
            translation_placeholders={
                "entity_id": self.entity_id,
            },
        )
        try:
            await self.coordinator.client.set_setting(
                self.appliance.info.ha_id,
                setting_key=SettingKey(self.bsh_key),
                value=time_to_seconds(value),
            )
        except HomeConnectError as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="set_setting_entity",
                translation_placeholders={
                    **get_dict_from_home_connect_error(err),
                    "entity_id": self.entity_id,
                    "key": self.bsh_key,
                    "value": str(value),
                },
            ) from err

    def update_native_value(self) -> None:
        """Set the value of the entity."""
        data = self.appliance.settings[cast(SettingKey, self.bsh_key)]
        self._attr_native_value = seconds_to_time(data.value)
@@ -337,7 +337,14 @@ class HKDevice:
         # We need to explicitly poll characteristics to get fresh sensor readings
         # before processing the entity map and creating devices.
         # Use poll_all=True since entities haven't registered their characteristics yet.
-        await self.async_update(poll_all=True)
+        try:
+            await self.async_update(poll_all=True)
+        except ValueError as exc:
+            _LOGGER.debug(
+                "Accessory %s responded with unparsable response, first update was skipped: %s",
+                self.unique_id,
+                exc,
+            )

         await self.async_process_entity_map()

@@ -14,6 +14,6 @@
   "documentation": "https://www.home-assistant.io/integrations/homekit_controller",
   "iot_class": "local_push",
   "loggers": ["aiohomekit", "commentjson"],
-  "requirements": ["aiohomekit==3.2.17"],
+  "requirements": ["aiohomekit==3.2.18"],
   "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
 }
@@ -124,7 +124,7 @@ SCHEMA_SET_HOME_COOLING_MODE = vol.Schema(
 def async_setup_services(hass: HomeAssistant) -> None:
     """Set up the HomematicIP Cloud services."""

-    @verify_domain_control(hass, DOMAIN)
+    @verify_domain_control(DOMAIN)
     async def async_call_hmipc_service(service: ServiceCall) -> None:
         """Call correct HomematicIP Cloud service."""
         service_name = service.service
@@ -88,7 +88,7 @@
       "message": "Honeywell set temperature failed: invalid temperature {temperature}"
     },
     "temp_failed_range": {
-      "message": "Honeywell set temperature failed: temperature out of range. Mode: {mode}, Heat Temperuature: {heat}, Cool Temperature: {cool}"
+      "message": "Honeywell set temperature failed: temperature out of range. Mode: {mode}, Heat temperature: {heat}, Cool temperature: {cool}"
     },
     "set_hold_failed": {
       "message": "Honeywell could not set permanent hold"
@@ -64,7 +64,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
     hass.services.async_register(
         DOMAIN,
         SERVICE_HUE_ACTIVATE_SCENE,
-        verify_domain_control(hass, DOMAIN)(hue_activate_scene),
+        verify_domain_control(DOMAIN)(hue_activate_scene),
         schema=vol.Schema(
             {
                 vol.Required(ATTR_GROUP_NAME): cv.string,
@@ -9,9 +9,8 @@ from aioimmich.assets.models import ImmichAsset
 from aioimmich.exceptions import ImmichError

 from homeassistant.components.http import HomeAssistantView
-from homeassistant.components.media_player import MediaClass
+from homeassistant.components.media_player import BrowseError, MediaClass
 from homeassistant.components.media_source import (
-    BrowseError,
     BrowseMediaSource,
     MediaSource,
     MediaSourceItem,
homeassistant/components/irm_kmi/__init__.py (new file, 40 lines)
@@ -0,0 +1,40 @@
"""Integration for IRM KMI weather."""

import logging

from irm_kmi_api import IrmKmiApiClientHa

from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import IRM_KMI_TO_HA_CONDITION_MAP, PLATFORMS, USER_AGENT
from .coordinator import IrmKmiConfigEntry, IrmKmiCoordinator

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, entry: IrmKmiConfigEntry) -> bool:
    """Set up this integration using UI."""
    api_client = IrmKmiApiClientHa(
        session=async_get_clientsession(hass),
        user_agent=USER_AGENT,
        cdt_map=IRM_KMI_TO_HA_CONDITION_MAP,
    )

    entry.runtime_data = IrmKmiCoordinator(hass, entry, api_client)

    await entry.runtime_data.async_config_entry_first_refresh()

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: IrmKmiConfigEntry) -> bool:
    """Handle removal of an entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_reload_entry(hass: HomeAssistant, entry: IrmKmiConfigEntry) -> None:
    """Reload config entry."""
    await hass.config_entries.async_reload(entry.entry_id)
homeassistant/components/irm_kmi/config_flow.py (new file, 132 lines)
@@ -0,0 +1,132 @@
"""Config flow to set up IRM KMI integration via the UI."""

import logging

from irm_kmi_api import IrmKmiApiClient, IrmKmiApiError
import voluptuous as vol

from homeassistant.config_entries import (
    ConfigFlow,
    ConfigFlowResult,
    OptionsFlow,
    OptionsFlowWithReload,
)
from homeassistant.const import (
    ATTR_LATITUDE,
    ATTR_LONGITUDE,
    CONF_LOCATION,
    CONF_UNIQUE_ID,
)
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
    LocationSelector,
    SelectSelector,
    SelectSelectorConfig,
    SelectSelectorMode,
)

from .const import (
    CONF_LANGUAGE_OVERRIDE,
    CONF_LANGUAGE_OVERRIDE_OPTIONS,
    DOMAIN,
    OUT_OF_BENELUX,
    USER_AGENT,
)
from .coordinator import IrmKmiConfigEntry

_LOGGER = logging.getLogger(__name__)


class IrmKmiConfigFlow(ConfigFlow, domain=DOMAIN):
    """Configuration flow for the IRM KMI integration."""

    VERSION = 1

    @staticmethod
    @callback
    def async_get_options_flow(_config_entry: IrmKmiConfigEntry) -> OptionsFlow:
        """Create the options flow."""
        return IrmKmiOptionFlow()

    async def async_step_user(self, user_input: dict | None = None) -> ConfigFlowResult:
        """Define the user step of the configuration flow."""
        errors: dict = {}

        default_location = {
            ATTR_LATITUDE: self.hass.config.latitude,
            ATTR_LONGITUDE: self.hass.config.longitude,
        }

        if user_input:
            _LOGGER.debug("Provided config user is: %s", user_input)

            lat: float = user_input[CONF_LOCATION][ATTR_LATITUDE]
            lon: float = user_input[CONF_LOCATION][ATTR_LONGITUDE]

            try:
                api_data = await IrmKmiApiClient(
                    session=async_get_clientsession(self.hass),
                    user_agent=USER_AGENT,
                ).get_forecasts_coord({"lat": lat, "long": lon})
            except IrmKmiApiError:
                _LOGGER.exception(
                    "Encountered an unexpected error while configuring the integration"
                )
                return self.async_abort(reason="api_error")

            if api_data["cityName"] in OUT_OF_BENELUX:
                errors[CONF_LOCATION] = "out_of_benelux"

            if not errors:
                name: str = api_data["cityName"]
                country: str = api_data["country"]
                unique_id: str = f"{name.lower()} {country.lower()}"
                await self.async_set_unique_id(unique_id)
                self._abort_if_unique_id_configured()
                user_input[CONF_UNIQUE_ID] = unique_id

                return self.async_create_entry(title=name, data=user_input)

            default_location = user_input[CONF_LOCATION]
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(
                        CONF_LOCATION, default=default_location
                    ): LocationSelector()
                }
            ),
            errors=errors,
        )


class IrmKmiOptionFlow(OptionsFlowWithReload):
    """Option flow for the IRM KMI integration, help change the options once the integration was configured."""

    async def async_step_init(self, user_input: dict | None = None) -> ConfigFlowResult:
        """Manage the options."""
        if user_input is not None:
            _LOGGER.debug("Provided config user is: %s", user_input)
            return self.async_create_entry(data=user_input)

        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema(
                {
                    vol.Optional(
                        CONF_LANGUAGE_OVERRIDE,
                        default=self.config_entry.options.get(
                            CONF_LANGUAGE_OVERRIDE, "none"
                        ),
                    ): SelectSelector(
                        SelectSelectorConfig(
                            options=CONF_LANGUAGE_OVERRIDE_OPTIONS,
                            mode=SelectSelectorMode.DROPDOWN,
                            translation_key=CONF_LANGUAGE_OVERRIDE,
                        )
                    )
                }
            ),
        )
homeassistant/components/irm_kmi/const.py (new file, 102 lines)
@@ -0,0 +1,102 @@
"""Constants for the IRM KMI integration."""

from typing import Final

from homeassistant.components.weather import (
    ATTR_CONDITION_CLEAR_NIGHT,
    ATTR_CONDITION_CLOUDY,
    ATTR_CONDITION_FOG,
    ATTR_CONDITION_LIGHTNING_RAINY,
    ATTR_CONDITION_PARTLYCLOUDY,
    ATTR_CONDITION_POURING,
    ATTR_CONDITION_RAINY,
    ATTR_CONDITION_SNOWY,
    ATTR_CONDITION_SNOWY_RAINY,
    ATTR_CONDITION_SUNNY,
)
from homeassistant.const import Platform, __version__

DOMAIN: Final = "irm_kmi"
PLATFORMS: Final = [Platform.WEATHER]

OUT_OF_BENELUX: Final = [
    "außerhalb der Benelux (Brussels)",
    "Hors de Belgique (Bxl)",
    "Outside the Benelux (Brussels)",
    "Buiten de Benelux (Brussel)",
]
LANGS: Final = ["en", "fr", "nl", "de"]

CONF_LANGUAGE_OVERRIDE: Final = "language_override"
CONF_LANGUAGE_OVERRIDE_OPTIONS: Final = ["none", "fr", "nl", "de", "en"]

# Dict to map ('ww', 'dayNight') tuple from IRM KMI to HA conditions.
IRM_KMI_TO_HA_CONDITION_MAP: Final = {
    (0, "d"): ATTR_CONDITION_SUNNY,
    (0, "n"): ATTR_CONDITION_CLEAR_NIGHT,
    (1, "d"): ATTR_CONDITION_SUNNY,
    (1, "n"): ATTR_CONDITION_CLEAR_NIGHT,
    (2, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
    (2, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
    (3, "d"): ATTR_CONDITION_PARTLYCLOUDY,
    (3, "n"): ATTR_CONDITION_PARTLYCLOUDY,
    (4, "d"): ATTR_CONDITION_POURING,
    (4, "n"): ATTR_CONDITION_POURING,
    (5, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
    (5, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
    (6, "d"): ATTR_CONDITION_POURING,
    (6, "n"): ATTR_CONDITION_POURING,
    (7, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
    (7, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
    (8, "d"): ATTR_CONDITION_SNOWY_RAINY,
    (8, "n"): ATTR_CONDITION_SNOWY_RAINY,
    (9, "d"): ATTR_CONDITION_SNOWY_RAINY,
    (9, "n"): ATTR_CONDITION_SNOWY_RAINY,
    (10, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
    (10, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
    (11, "d"): ATTR_CONDITION_SNOWY,
    (11, "n"): ATTR_CONDITION_SNOWY,
    (12, "d"): ATTR_CONDITION_SNOWY,
    (12, "n"): ATTR_CONDITION_SNOWY,
    (13, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
    (13, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
    (14, "d"): ATTR_CONDITION_CLOUDY,
    (14, "n"): ATTR_CONDITION_CLOUDY,
    (15, "d"): ATTR_CONDITION_CLOUDY,
    (15, "n"): ATTR_CONDITION_CLOUDY,
    (16, "d"): ATTR_CONDITION_POURING,
    (16, "n"): ATTR_CONDITION_POURING,
    (17, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
    (17, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
    (18, "d"): ATTR_CONDITION_RAINY,
    (18, "n"): ATTR_CONDITION_RAINY,
    (19, "d"): ATTR_CONDITION_POURING,
    (19, "n"): ATTR_CONDITION_POURING,
    (20, "d"): ATTR_CONDITION_SNOWY_RAINY,
    (20, "n"): ATTR_CONDITION_SNOWY_RAINY,
    (21, "d"): ATTR_CONDITION_RAINY,
    (21, "n"): ATTR_CONDITION_RAINY,
    (22, "d"): ATTR_CONDITION_SNOWY,
    (22, "n"): ATTR_CONDITION_SNOWY,
    (23, "d"): ATTR_CONDITION_SNOWY,
    (23, "n"): ATTR_CONDITION_SNOWY,
    (24, "d"): ATTR_CONDITION_FOG,
    (24, "n"): ATTR_CONDITION_FOG,
    (25, "d"): ATTR_CONDITION_FOG,
    (25, "n"): ATTR_CONDITION_FOG,
    (26, "d"): ATTR_CONDITION_FOG,
    (26, "n"): ATTR_CONDITION_FOG,
    (27, "d"): ATTR_CONDITION_FOG,
    (27, "n"): ATTR_CONDITION_FOG,
}

IRM_KMI_NAME: Final = {
    "fr": "Institut Royal Météorologique de Belgique",
    "nl": "Koninklijk Meteorologisch Instituut van België",
    "de": "Königliche Meteorologische Institut von Belgien",
    "en": "Royal Meteorological Institute of Belgium",
}

USER_AGENT: Final = (
    f"https://www.home-assistant.io/integrations/irm_kmi (version {__version__})"
)
homeassistant/components/irm_kmi/coordinator.py (new file, 95 lines)
@@ -0,0 +1,95 @@
|
||||
"""DataUpdateCoordinator for the IRM KMI integration."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from irm_kmi_api import IrmKmiApiClientHa, IrmKmiApiError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE, CONF_LOCATION
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
TimestampDataUpdateCoordinator,
|
||||
UpdateFailed,
|
||||
)
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
from .data import ProcessedCoordinatorData
|
||||
from .utils import preferred_language
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type IrmKmiConfigEntry = ConfigEntry[IrmKmiCoordinator]
|
||||
|
||||
|
||||
class IrmKmiCoordinator(TimestampDataUpdateCoordinator[ProcessedCoordinatorData]):
|
||||
"""Coordinator to update data from IRM KMI."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: IrmKmiConfigEntry,
|
||||
api_client: IrmKmiApiClientHa,
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name="IRM KMI weather",
|
||||
update_interval=timedelta(minutes=7),
|
||||
)
|
||||
self._api = api_client
|
||||
self._location = entry.data[CONF_LOCATION]
|
||||
|
||||
async def _async_update_data(self) -> ProcessedCoordinatorData:
|
||||
"""Fetch data from API endpoint.
|
||||
|
||||
This is the place to pre-process the data to lookup tables so entities can quickly look up their data.
|
||||
:return: ProcessedCoordinatorData
|
||||
"""
|
||||
|
||||
self._api.expire_cache()
|
||||
|
||||
try:
|
||||
await self._api.refresh_forecasts_coord(
|
||||
{
|
||||
"lat": self._location[ATTR_LATITUDE],
|
||||
"long": self._location[ATTR_LONGITUDE],
|
||||
}
|
||||
)
|
||||
|
||||
except IrmKmiApiError as err:
|
||||
if (
|
||||
self.last_update_success_time is not None
|
||||
and self.update_interval is not None
|
||||
and self.last_update_success_time - utcnow()
|
||||
< timedelta(seconds=2.5 * self.update_interval.seconds)
|
||||
):
|
||||
return self.data
|
||||
|
||||
_LOGGER.warning(
|
||||
"Could not connect to the API since %s", self.last_update_success_time
|
||||
)
|
||||
raise UpdateFailed(
|
||||
f"Error communicating with API for general forecast: {err}. "
|
||||
f"Last success time is: {self.last_update_success_time}"
|
||||
) from err
|
||||
|
||||
if not self.last_update_success:
|
||||
_LOGGER.warning("Successfully reconnected to the API")
|
||||
|
||||
return await self.process_api_data()
|
||||
|
||||
async def process_api_data(self) -> ProcessedCoordinatorData:
|
||||
"""From the API data, create the object that will be used in the entities."""
|
||||
tz = await dt_util.async_get_time_zone("Europe/Brussels")
|
||||
lang = preferred_language(self.hass, self.config_entry)
|
||||
|
||||
return ProcessedCoordinatorData(
|
||||
current_weather=self._api.get_current_weather(tz),
|
||||
daily_forecast=self._api.get_daily_forecast(tz, lang),
|
||||
hourly_forecast=self._api.get_hourly_forecast(tz),
|
||||
country=self._api.get_country(),
|
||||
)
|
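On an API error, the update path above keeps serving the previously fetched data as long as the last successful refresh is recent, tolerating roughly 2.5 polling intervals (about 17.5 minutes at the 7-minute interval) before raising UpdateFailed. A minimal standalone sketch of that intended freshness rule follows; the helper name and the elapsed-time comparison are illustrative only and are not part of the integration:

    from datetime import datetime, timedelta, timezone

    def should_serve_cached(
        last_success: datetime | None,
        update_interval: timedelta,
        now: datetime | None = None,
    ) -> bool:
        """Return True when cached data is still fresh enough to serve after an API error."""
        if last_success is None:
            return False
        now = now or datetime.now(timezone.utc)
        # Tolerate outages shorter than 2.5 polling intervals before failing the update.
        return now - last_success < timedelta(seconds=2.5 * update_interval.total_seconds())

    # With a 7-minute interval, a 15-minute-old success still serves cache; 20 minutes does not.
    interval = timedelta(minutes=7)
    now = datetime.now(timezone.utc)
    assert should_serve_cached(now - timedelta(minutes=15), interval, now)
    assert not should_serve_cached(now - timedelta(minutes=20), interval, now)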
homeassistant/components/irm_kmi/data.py (new file, 17 lines)
@@ -0,0 +1,17 @@
"""Define data classes for the IRM KMI integration."""

from dataclasses import dataclass, field

from irm_kmi_api import CurrentWeatherData, ExtendedForecast

from homeassistant.components.weather import Forecast


@dataclass
class ProcessedCoordinatorData:
    """Dataclass that will be exposed to the entities consuming data from an IrmKmiCoordinator."""

    current_weather: CurrentWeatherData
    country: str
    hourly_forecast: list[Forecast] = field(default_factory=list)
    daily_forecast: list[ExtendedForecast] = field(default_factory=list)
homeassistant/components/irm_kmi/entity.py (new file, 28 lines)
@@ -0,0 +1,28 @@
"""Base class shared among IRM KMI entities."""

from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN, IRM_KMI_NAME
from .coordinator import IrmKmiConfigEntry, IrmKmiCoordinator
from .utils import preferred_language


class IrmKmiBaseEntity(CoordinatorEntity[IrmKmiCoordinator]):
    """Base methods for IRM KMI entities."""

    _attr_attribution = (
        "Weather data from the Royal Meteorological Institute of Belgium meteo.be"
    )
    _attr_has_entity_name = True

    def __init__(self, entry: IrmKmiConfigEntry) -> None:
        """Init base properties for IRM KMI entities."""
        coordinator = entry.runtime_data
        super().__init__(coordinator)

        self._attr_device_info = DeviceInfo(
            entry_type=DeviceEntryType.SERVICE,
            identifiers={(DOMAIN, entry.entry_id)},
            manufacturer=IRM_KMI_NAME.get(preferred_language(self.hass, entry)),
        )
homeassistant/components/irm_kmi/manifest.json (new file, 13 lines)
@@ -0,0 +1,13 @@
{
  "domain": "irm_kmi",
  "name": "IRM KMI Weather Belgium",
  "codeowners": ["@jdejaegh"],
  "config_flow": true,
  "dependencies": ["zone"],
  "documentation": "https://www.home-assistant.io/integrations/irm_kmi",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["irm_kmi_api"],
  "quality_scale": "bronze",
  "requirements": ["irm-kmi-api==1.1.0"]
}
homeassistant/components/irm_kmi/quality_scale.yaml (new file, 86 lines)
@@ -0,0 +1,86 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: >
      No service action implemented in this integration at the moment.
  appropriate-polling:
    status: done
    comment: >
      Polling interval is set to 7 minutes.
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: >
      No service action implemented in this integration at the moment.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup: done
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: >
      No service action implemented in this integration at the moment.
  config-entry-unloading: done
  docs-configuration-parameters: done
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: done
  reauthentication-flow:
    status: exempt
    comment: >
      There is no authentication for this integration
  test-coverage: todo
  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: >
      The integration does not look for devices on the network. It uses an online API.
  discovery:
    status: exempt
    comment: >
      The integration does not look for devices on the network. It uses an online API.
  docs-data-update: done
  docs-examples: todo
  docs-known-limitations: done
  docs-supported-devices:
    status: exempt
    comment: >
      This integration does not integrate physical devices.
  docs-supported-functions: done
  docs-troubleshooting: todo
  docs-use-cases: done
  dynamic-devices: done
  entity-category: todo
  entity-device-class: todo
  entity-disabled-by-default: todo
  entity-translations: todo
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow:
    status: exempt
    comment: >
      There is no configuration per se, just a zone to pick.
  repair-issues: done
  stale-devices: done

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: todo
homeassistant/components/irm_kmi/strings.json (new file, 50 lines)
@@ -0,0 +1,50 @@
{
  "title": "Royal Meteorological Institute of Belgium",
  "common": {
    "language_override_description": "Override the Home Assistant language for the textual weather forecast."
  },
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_location%]",
      "api_error": "[%key:common::config_flow::error::cannot_connect%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "user": {
        "data": {
          "location": "[%key:common::config_flow::data::location%]"
        },
        "data_description": {
          "location": "[%key:common::config_flow::data::location%]"
        }
      }
    },
    "error": {
      "out_of_benelux": "The location is outside of Benelux. Pick a location in Benelux."
    }
  },
  "selector": {
    "language_override": {
      "options": {
        "none": "Follow Home Assistant server language",
        "fr": "French",
        "nl": "Dutch",
        "de": "German",
        "en": "English"
      }
    }
  },
  "options": {
    "step": {
      "init": {
        "title": "Options",
        "data": {
          "language_override": "[%key:common::config_flow::data::language%]"
        },
        "data_description": {
          "language_override": "[%key:component::irm_kmi::common::language_override_description%]"
        }
      }
    }
  }
}
homeassistant/components/irm_kmi/utils.py (new file, 18 lines)
@@ -0,0 +1,18 @@
"""Helper functions for use with IRM KMI integration."""

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

from .const import CONF_LANGUAGE_OVERRIDE, LANGS


def preferred_language(hass: HomeAssistant, config_entry: ConfigEntry | None) -> str:
    """Get the preferred language for the integration if it was overridden by the configuration."""

    if (
        config_entry is None
        or config_entry.options.get(CONF_LANGUAGE_OVERRIDE) == "none"
    ):
        return hass.config.language if hass.config.language in LANGS else "en"

    return config_entry.options.get(CONF_LANGUAGE_OVERRIDE, "en")
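As a quick illustration of the fallback rule above (an override from the options flow wins; otherwise the server language is used when it is one of the supported languages, else English), here is a small self-contained mock. It reuses the "language_override" option key from the options flow, but the stand-in types below are not Home Assistant objects:

    from dataclasses import dataclass, field

    LANGS = ["fr", "nl", "de", "en"]

    @dataclass
    class FakeEntry:
        options: dict = field(default_factory=dict)

    def resolve_language(server_language: str, entry: FakeEntry | None) -> str:
        """Mirror preferred_language(): override wins, otherwise server language or 'en'."""
        if entry is None or entry.options.get("language_override") == "none":
            return server_language if server_language in LANGS else "en"
        return entry.options.get("language_override", "en")

    assert resolve_language("nl", None) == "nl"  # supported server language
    assert resolve_language("it", None) == "en"  # unsupported server language falls back to English
    assert resolve_language("nl", FakeEntry({"language_override": "fr"})) == "fr"  # override wins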
homeassistant/components/irm_kmi/weather.py (new file, 158 lines)
@@ -0,0 +1,158 @@
"""Support for IRM KMI weather."""

from irm_kmi_api import CurrentWeatherData

from homeassistant.components.weather import (
    Forecast,
    SingleCoordinatorWeatherEntity,
    WeatherEntityFeature,
)
from homeassistant.const import (
    CONF_UNIQUE_ID,
    UnitOfPrecipitationDepth,
    UnitOfPressure,
    UnitOfSpeed,
    UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import IrmKmiConfigEntry, IrmKmiCoordinator
from .entity import IrmKmiBaseEntity


async def async_setup_entry(
    _hass: HomeAssistant,
    entry: IrmKmiConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the weather entry."""
    async_add_entities([IrmKmiWeather(entry)])


# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


class IrmKmiWeather(
    IrmKmiBaseEntity,  # WeatherEntity
    SingleCoordinatorWeatherEntity[IrmKmiCoordinator],
):
    """Weather entity for IRM KMI weather."""

    _attr_name = None
    _attr_supported_features = (
        WeatherEntityFeature.FORECAST_DAILY
        | WeatherEntityFeature.FORECAST_TWICE_DAILY
        | WeatherEntityFeature.FORECAST_HOURLY
    )
    _attr_native_temperature_unit = UnitOfTemperature.CELSIUS
    _attr_native_wind_speed_unit = UnitOfSpeed.KILOMETERS_PER_HOUR
    _attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
    _attr_native_pressure_unit = UnitOfPressure.HPA

    def __init__(self, entry: IrmKmiConfigEntry) -> None:
        """Create a new instance of the weather entity from a configuration entry."""
        IrmKmiBaseEntity.__init__(self, entry)
        SingleCoordinatorWeatherEntity.__init__(self, entry.runtime_data)
        self._attr_unique_id = entry.data[CONF_UNIQUE_ID]

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return super().available

    @property
    def current_weather(self) -> CurrentWeatherData:
        """Return the current weather."""
        return self.coordinator.data.current_weather

    @property
    def condition(self) -> str | None:
        """Return the current condition."""
        return self.current_weather.get("condition")

    @property
    def native_temperature(self) -> float | None:
        """Return the temperature in native units."""
        return self.current_weather.get("temperature")

    @property
    def native_wind_speed(self) -> float | None:
        """Return the wind speed in native units."""
        return self.current_weather.get("wind_speed")

    @property
    def native_wind_gust_speed(self) -> float | None:
        """Return the wind gust speed in native units."""
        return self.current_weather.get("wind_gust_speed")

    @property
    def wind_bearing(self) -> float | str | None:
        """Return the wind bearing."""
        return self.current_weather.get("wind_bearing")

    @property
    def native_pressure(self) -> float | None:
        """Return the pressure in native units."""
        return self.current_weather.get("pressure")

    @property
    def uv_index(self) -> float | None:
        """Return the UV index."""
        return self.current_weather.get("uv_index")

    def _async_forecast_twice_daily(self) -> list[Forecast] | None:
        """Return the daily forecast in native units."""
        return self.coordinator.data.daily_forecast

    def _async_forecast_daily(self) -> list[Forecast] | None:
        """Return the daily forecast in native units."""
        return self.daily_forecast()

    def _async_forecast_hourly(self) -> list[Forecast] | None:
        """Return the hourly forecast in native units."""
        return self.coordinator.data.hourly_forecast

    def daily_forecast(self) -> list[Forecast] | None:
        """Return the daily forecast in native units."""
        data: list[Forecast] = self.coordinator.data.daily_forecast

        # The data in daily_forecast might contain nighttime forecast.
        # The following handle the lowest temperature attribute to be displayed correctly.
        if (
            len(data) > 1
            and not data[0].get("is_daytime")
            and data[1].get("native_templow") is None
        ):
            data[1]["native_templow"] = data[0].get("native_templow")
            if (
                data[1]["native_templow"] is not None
                and data[1]["native_temperature"] is not None
                and data[1]["native_templow"] > data[1]["native_temperature"]
            ):
                (data[1]["native_templow"], data[1]["native_temperature"]) = (
                    data[1]["native_temperature"],
                    data[1]["native_templow"],
                )

        if len(data) > 0 and not data[0].get("is_daytime"):
            return data

        if (
            len(data) > 1
            and data[0].get("native_templow") is None
            and not data[1].get("is_daytime")
        ):
            data[0]["native_templow"] = data[1].get("native_templow")
            if (
                data[0]["native_templow"] is not None
                and data[0]["native_temperature"] is not None
                and data[0]["native_templow"] > data[0]["native_temperature"]
            ):
                (data[0]["native_templow"], data[0]["native_temperature"]) = (
                    data[0]["native_temperature"],
                    data[0]["native_templow"],
                )

        return [f for f in data if f.get("is_daytime")]
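The daily_forecast() fix-up above exists because the twice-daily list can open with a daytime entry that has no low temperature yet; the low is then borrowed from the following night before the nighttime entries are filtered out. A rough illustration with plain dicts, with invented values and only the Forecast keys used above:

    twice_daily = [
        {"is_daytime": True, "native_temperature": 14, "native_templow": None},  # today, day part
        {"is_daytime": False, "native_temperature": 9, "native_templow": 3},     # tonight
        {"is_daytime": True, "native_temperature": 15, "native_templow": 7},     # tomorrow
    ]
    # After the fix-up, today's entry inherits tonight's low (3) and only the two
    # daytime entries remain, so the daily view reads 14/3 today and 15/7 tomorrow.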
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["pyecotrend_ista"],
  "quality_scale": "gold",
  "requirements": ["pyecotrend-ista==3.3.1"]
  "requirements": ["pyecotrend-ista==3.4.0"]
}
@@ -9,5 +9,5 @@
  "iot_class": "local_push",
  "loggers": ["pypck"],
  "quality_scale": "bronze",
  "requirements": ["pypck==0.8.10", "lcn-frontend==0.2.7"]
  "requirements": ["pypck==0.8.12", "lcn-frontend==0.2.7"]
}
@@ -3,6 +3,7 @@
from __future__ import annotations

from collections.abc import Mapping
from datetime import time
import logging
from typing import TYPE_CHECKING, Any

@@ -70,6 +71,9 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
                event_filter=self.async_config_update_filter,
            )
        )
        # Time of day for fetching the device's energy usage
        # (randomly assigned when device is first created in Home Assistant)
        self.update_energy_at_time_of_day: time | None = None

    async def _handle_update_config(self, _: Event) -> None:
        """Handle update core config."""
@@ -34,6 +34,7 @@ class ThinQEntity(CoordinatorEntity[DeviceDataUpdateCoordinator]):
        coordinator: DeviceDataUpdateCoordinator,
        entity_description: EntityDescription,
        property_id: str,
        postfix_id: str | None = None,
    ) -> None:
        """Initialize an entity."""
        super().__init__(coordinator)
@@ -48,7 +49,11 @@ class ThinQEntity(CoordinatorEntity[DeviceDataUpdateCoordinator]):
            model=f"{coordinator.api.device.model_name} ({self.coordinator.api.device.device_type})",
            name=coordinator.device_name,
        )
        self._attr_unique_id = f"{coordinator.unique_id}_{self.property_id}"
        self._attr_unique_id = (
            f"{coordinator.unique_id}_{self.property_id}"
            if postfix_id is None
            else f"{coordinator.unique_id}_{self.property_id}_{postfix_id}"
        )
        if self.location is not None and self.location not in (
            Location.MAIN,
            Location.OVEN,
@@ -440,6 +440,15 @@
      },
      "cycle_count_for_location": {
        "default": "mdi:counter"
      },
      "energy_usage_yesterday": {
        "default": "mdi:chart-bar"
      },
      "energy_usage_this_month": {
        "default": "mdi:chart-bar"
      },
      "energy_usage_last_month": {
        "default": "mdi:chart-bar"
      }
    }
  }
@@ -2,10 +2,13 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, time, timedelta
|
||||
import logging
|
||||
import random
|
||||
|
||||
from thinqconnect import DeviceType
|
||||
from thinqconnect import USAGE_DAILY, USAGE_MONTHLY, DeviceType, ThinQAPIException
|
||||
from thinqconnect.devices.const import Property as ThinQProperty
|
||||
from thinqconnect.integration import ActiveMode, ThinQPropertyEx, TimerProperty
|
||||
|
||||
@@ -18,11 +21,13 @@ from homeassistant.components.sensor import (
|
||||
from homeassistant.const import (
|
||||
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
PERCENTAGE,
|
||||
UnitOfEnergy,
|
||||
UnitOfTemperature,
|
||||
UnitOfTime,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.event import async_track_point_in_time
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import ThinqConfigEntry
|
||||
@@ -553,6 +558,44 @@ DEVICE_TYPE_SENSOR_MAP: dict[DeviceType, tuple[SensorEntityDescription, ...]] =
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class ThinQEnergySensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes ThinQ energy sensor entity."""
|
||||
|
||||
device_class = SensorDeviceClass.ENERGY
|
||||
state_class = SensorStateClass.TOTAL
|
||||
native_unit_of_measurement = UnitOfEnergy.WATT_HOUR
|
||||
suggested_display_precision = 0
|
||||
usage_period: str
|
||||
start_date_fn: Callable[[datetime], datetime]
|
||||
end_date_fn: Callable[[datetime], datetime]
|
||||
update_interval: timedelta = timedelta(days=1)
|
||||
|
||||
|
||||
ENERGY_USAGE_SENSORS: tuple[ThinQEnergySensorEntityDescription, ...] = (
|
||||
ThinQEnergySensorEntityDescription(
|
||||
key="yesterday",
|
||||
translation_key="energy_usage_yesterday",
|
||||
usage_period=USAGE_DAILY,
|
||||
start_date_fn=lambda today: today - timedelta(days=1),
|
||||
end_date_fn=lambda today: today - timedelta(days=1),
|
||||
),
|
||||
ThinQEnergySensorEntityDescription(
|
||||
key="this_month",
|
||||
translation_key="energy_usage_this_month",
|
||||
usage_period=USAGE_MONTHLY,
|
||||
start_date_fn=lambda today: today,
|
||||
end_date_fn=lambda today: today,
|
||||
),
|
||||
ThinQEnergySensorEntityDescription(
|
||||
key="last_month",
|
||||
translation_key="energy_usage_last_month",
|
||||
usage_period=USAGE_MONTHLY,
|
||||
start_date_fn=lambda today: today.replace(day=1) - timedelta(days=1),
|
||||
end_date_fn=lambda today: today.replace(day=1) - timedelta(days=1),
|
||||
),
|
||||
)
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -562,7 +605,7 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up an entry for sensor platform."""
|
||||
entities: list[ThinQSensorEntity] = []
|
||||
entities: list[ThinQSensorEntity | ThinQEnergySensorEntity] = []
|
||||
for coordinator in entry.runtime_data.coordinators.values():
|
||||
if (
|
||||
descriptions := DEVICE_TYPE_SENSOR_MAP.get(
|
||||
@@ -584,7 +627,23 @@ async def async_setup_entry(
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
for energy_description in ENERGY_USAGE_SENSORS:
|
||||
entities.extend(
|
||||
ThinQEnergySensorEntity(
|
||||
coordinator=coordinator,
|
||||
entity_description=energy_description,
|
||||
property_id=energy_property_id,
|
||||
postfix_id=energy_description.key,
|
||||
)
|
||||
for energy_property_id in coordinator.api.get_active_idx(
|
||||
(
|
||||
ThinQPropertyEx.ENERGY_USAGE
|
||||
if coordinator.sub_id is None
|
||||
else f"{ThinQPropertyEx.ENERGY_USAGE}_{coordinator.sub_id}"
|
||||
),
|
||||
ActiveMode.READ_ONLY,
|
||||
)
|
||||
)
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
@@ -686,3 +745,84 @@ class ThinQSensorEntity(ThinQEntity, SensorEntity):
|
||||
if unit == UnitOfTime.SECONDS:
|
||||
return (data.hour * 3600) + (data.minute * 60) + data.second
|
||||
return 0
|
||||
|
||||
|
||||
class ThinQEnergySensorEntity(ThinQEntity, SensorEntity):
|
||||
"""Represent a ThinQ energy sensor platform."""
|
||||
|
||||
entity_description: ThinQEnergySensorEntityDescription
|
||||
_stop_update: Callable[[], None] | None = None
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle added to Hass."""
|
||||
await super().async_added_to_hass()
|
||||
if self.coordinator.update_energy_at_time_of_day is None:
|
||||
# random time 01:00:00 ~ 02:59:00
|
||||
self.coordinator.update_energy_at_time_of_day = time(
|
||||
hour=random.randint(1, 2), minute=random.randint(0, 59)
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"[%s] Set energy update time: %s",
|
||||
self.coordinator.device_name,
|
||||
self.coordinator.update_energy_at_time_of_day,
|
||||
)
|
||||
|
||||
await self._async_update_and_schedule()
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
if self._stop_update is not None:
|
||||
self._stop_update()
|
||||
return await super().async_will_remove_from_hass()
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return super().available or self.native_value is not None
|
||||
|
||||
async def async_update(self, now: datetime | None = None) -> None:
|
||||
"""Update the state of the sensor."""
|
||||
await self._async_update_and_schedule()
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def _async_update_and_schedule(self) -> None:
|
||||
"""Update the state of the sensor."""
|
||||
local_now = datetime.now(
|
||||
dt_util.get_time_zone(self.coordinator.hass.config.time_zone)
|
||||
)
|
||||
next_update = local_now + self.entity_description.update_interval
|
||||
if self.coordinator.update_energy_at_time_of_day is not None:
|
||||
# calculate next_update time by combining tomorrow and update_energy_at_time_of_day
|
||||
next_update = datetime.combine(
|
||||
(next_update).date(),
|
||||
self.coordinator.update_energy_at_time_of_day,
|
||||
next_update.tzinfo,
|
||||
)
|
||||
try:
|
||||
self._attr_native_value = await self.coordinator.api.async_get_energy_usage(
|
||||
energy_property=self.property_id,
|
||||
period=self.entity_description.usage_period,
|
||||
start_date=(self.entity_description.start_date_fn(local_now)).date(),
|
||||
end_date=(self.entity_description.end_date_fn(local_now)).date(),
|
||||
detail=False,
|
||||
)
|
||||
except ThinQAPIException as exc:
|
||||
_LOGGER.warning(
|
||||
"[%s:%s] Failed to fetch energy usage data. reason=%s",
|
||||
self.coordinator.device_name,
|
||||
self.entity_description.key,
|
||||
exc,
|
||||
)
|
||||
finally:
|
||||
_LOGGER.debug(
|
||||
"[%s:%s] async_update_and_schedule next_update: %s, native_value: %s",
|
||||
self.coordinator.device_name,
|
||||
self.entity_description.key,
|
||||
next_update,
|
||||
self._attr_native_value,
|
||||
)
|
||||
self._stop_update = async_track_point_in_time(
|
||||
self.coordinator.hass,
|
||||
self.async_update,
|
||||
next_update,
|
||||
)
|
||||
|
@@ -923,6 +923,15 @@
      },
      "cycle_count_for_location": {
        "name": "{location} cycles"
      },
      "energy_usage_yesterday": {
        "name": "Energy yesterday"
      },
      "energy_usage_this_month": {
        "name": "Energy this month"
      },
      "energy_usage_last_month": {
        "name": "Energy last month"
      }
    },
    "select": {
@@ -2,30 +2,17 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from typing import Any, Protocol
|
||||
from typing import Protocol
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import frontend, websocket_api
|
||||
from homeassistant.components.media_player import (
|
||||
ATTR_MEDIA_CONTENT_ID,
|
||||
CONTENT_AUTH_EXPIRY_TIME,
|
||||
BrowseError,
|
||||
BrowseMedia,
|
||||
async_process_play_media_url,
|
||||
)
|
||||
from homeassistant.components.websocket_api import ActiveConnection
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.frame import report_usage
|
||||
from homeassistant.helpers.integration_platform import (
|
||||
async_process_integration_platforms,
|
||||
)
|
||||
from homeassistant.helpers.typing import UNDEFINED, ConfigType, UndefinedType
|
||||
from homeassistant.loader import bind_hass
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from . import local_source
|
||||
from . import http, local_source
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
MEDIA_CLASS_MAP,
|
||||
@@ -34,7 +21,8 @@ from .const import (
|
||||
URI_SCHEME,
|
||||
URI_SCHEME_REGEX,
|
||||
)
|
||||
from .error import MediaSourceError, UnknownMediaSource, Unresolvable
|
||||
from .error import MediaSourceError, Unresolvable
|
||||
from .helper import async_browse_media, async_resolve_media
|
||||
from .models import BrowseMediaSource, MediaSource, MediaSourceItem, PlayMedia
|
||||
|
||||
__all__ = [
|
||||
@@ -80,11 +68,7 @@ def generate_media_source_id(domain: str, identifier: str) -> str:
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the media_source component."""
|
||||
hass.data[MEDIA_SOURCE_DATA] = {}
|
||||
websocket_api.async_register_command(hass, websocket_browse_media)
|
||||
websocket_api.async_register_command(hass, websocket_resolve_media)
|
||||
frontend.async_register_built_in_panel(
|
||||
hass, "media-browser", "media_browser", "hass:play-box-multiple"
|
||||
)
|
||||
http.async_setup(hass)
|
||||
|
||||
# Local sources support
|
||||
await _process_media_source_platform(hass, DOMAIN, local_source)
|
||||
@@ -107,141 +91,3 @@ async def _process_media_source_platform(
|
||||
hass.data[MEDIA_SOURCE_DATA][domain] = source
|
||||
if isinstance(source, local_source.LocalSource):
|
||||
hass.http.register_view(local_source.LocalMediaView(hass, source))
|
||||
|
||||
|
||||
@callback
|
||||
def _get_media_item(
|
||||
hass: HomeAssistant, media_content_id: str | None, target_media_player: str | None
|
||||
) -> MediaSourceItem:
|
||||
"""Return media item."""
|
||||
if media_content_id:
|
||||
item = MediaSourceItem.from_uri(hass, media_content_id, target_media_player)
|
||||
else:
|
||||
# We default to our own domain if its only one registered
|
||||
domain = None if len(hass.data[MEDIA_SOURCE_DATA]) > 1 else DOMAIN
|
||||
return MediaSourceItem(hass, domain, "", target_media_player)
|
||||
|
||||
if item.domain is not None and item.domain not in hass.data[MEDIA_SOURCE_DATA]:
|
||||
raise UnknownMediaSource(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="unknown_media_source",
|
||||
translation_placeholders={"domain": item.domain},
|
||||
)
|
||||
|
||||
return item
|
||||
|
||||
|
||||
@bind_hass
|
||||
async def async_browse_media(
|
||||
hass: HomeAssistant,
|
||||
media_content_id: str | None,
|
||||
*,
|
||||
content_filter: Callable[[BrowseMedia], bool] | None = None,
|
||||
) -> BrowseMediaSource:
|
||||
"""Return media player browse media results."""
|
||||
if DOMAIN not in hass.data:
|
||||
raise BrowseError("Media Source not loaded")
|
||||
|
||||
try:
|
||||
item = await _get_media_item(hass, media_content_id, None).async_browse()
|
||||
except ValueError as err:
|
||||
raise BrowseError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="browse_media_failed",
|
||||
translation_placeholders={
|
||||
"media_content_id": str(media_content_id),
|
||||
"error": str(err),
|
||||
},
|
||||
) from err
|
||||
|
||||
if content_filter is None or item.children is None:
|
||||
return item
|
||||
|
||||
old_count = len(item.children)
|
||||
item.children = [
|
||||
child for child in item.children if child.can_expand or content_filter(child)
|
||||
]
|
||||
item.not_shown += old_count - len(item.children)
|
||||
return item
|
||||
|
||||
|
||||
@bind_hass
|
||||
async def async_resolve_media(
|
||||
hass: HomeAssistant,
|
||||
media_content_id: str,
|
||||
target_media_player: str | None | UndefinedType = UNDEFINED,
|
||||
) -> PlayMedia:
|
||||
"""Get info to play media."""
|
||||
if DOMAIN not in hass.data:
|
||||
raise Unresolvable("Media Source not loaded")
|
||||
|
||||
if target_media_player is UNDEFINED:
|
||||
report_usage(
|
||||
"calls media_source.async_resolve_media without passing an entity_id",
|
||||
exclude_integrations={DOMAIN},
|
||||
)
|
||||
target_media_player = None
|
||||
|
||||
try:
|
||||
item = _get_media_item(hass, media_content_id, target_media_player)
|
||||
except ValueError as err:
|
||||
raise Unresolvable(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="resolve_media_failed",
|
||||
translation_placeholders={
|
||||
"media_content_id": str(media_content_id),
|
||||
"error": str(err),
|
||||
},
|
||||
) from err
|
||||
|
||||
return await item.async_resolve()
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "media_source/browse_media",
|
||||
vol.Optional(ATTR_MEDIA_CONTENT_ID, default=""): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def websocket_browse_media(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Browse available media."""
|
||||
try:
|
||||
media = await async_browse_media(hass, msg.get("media_content_id", ""))
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
media.as_dict(),
|
||||
)
|
||||
except BrowseError as err:
|
||||
connection.send_error(msg["id"], "browse_media_failed", str(err))
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "media_source/resolve_media",
|
||||
vol.Required(ATTR_MEDIA_CONTENT_ID): str,
|
||||
vol.Optional("expires", default=CONTENT_AUTH_EXPIRY_TIME): int,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def websocket_resolve_media(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Resolve media."""
|
||||
try:
|
||||
media = await async_resolve_media(hass, msg["media_content_id"], None)
|
||||
except Unresolvable as err:
|
||||
connection.send_error(msg["id"], "resolve_media_failed", str(err))
|
||||
return
|
||||
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
{
|
||||
"url": async_process_play_media_url(
|
||||
hass, media.url, allow_relative_url=True
|
||||
),
|
||||
"mime_type": media.mime_type,
|
||||
},
|
||||
)
|
||||
|
103
homeassistant/components/media_source/helper.py
Normal file
103
homeassistant/components/media_source/helper.py
Normal file
@@ -0,0 +1,103 @@
|
||||
"""Helpers for media source."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
|
||||
from homeassistant.components.media_player import BrowseError, BrowseMedia
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.frame import report_usage
|
||||
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
|
||||
from homeassistant.loader import bind_hass
|
||||
|
||||
from .const import DOMAIN, MEDIA_SOURCE_DATA
|
||||
from .error import UnknownMediaSource, Unresolvable
|
||||
from .models import BrowseMediaSource, MediaSourceItem, PlayMedia
|
||||
|
||||
|
||||
@callback
|
||||
def _get_media_item(
|
||||
hass: HomeAssistant, media_content_id: str | None, target_media_player: str | None
|
||||
) -> MediaSourceItem:
|
||||
"""Return media item."""
|
||||
if media_content_id:
|
||||
item = MediaSourceItem.from_uri(hass, media_content_id, target_media_player)
|
||||
else:
|
||||
# We default to our own domain if its only one registered
|
||||
domain = None if len(hass.data[MEDIA_SOURCE_DATA]) > 1 else DOMAIN
|
||||
return MediaSourceItem(hass, domain, "", target_media_player)
|
||||
|
||||
if item.domain is not None and item.domain not in hass.data[MEDIA_SOURCE_DATA]:
|
||||
raise UnknownMediaSource(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="unknown_media_source",
|
||||
translation_placeholders={"domain": item.domain},
|
||||
)
|
||||
|
||||
return item
|
||||
|
||||
|
||||
@bind_hass
|
||||
async def async_browse_media(
|
||||
hass: HomeAssistant,
|
||||
media_content_id: str | None,
|
||||
*,
|
||||
content_filter: Callable[[BrowseMedia], bool] | None = None,
|
||||
) -> BrowseMediaSource:
|
||||
"""Return media player browse media results."""
|
||||
if DOMAIN not in hass.data:
|
||||
raise BrowseError("Media Source not loaded")
|
||||
|
||||
try:
|
||||
item = await _get_media_item(hass, media_content_id, None).async_browse()
|
||||
except ValueError as err:
|
||||
raise BrowseError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="browse_media_failed",
|
||||
translation_placeholders={
|
||||
"media_content_id": str(media_content_id),
|
||||
"error": str(err),
|
||||
},
|
||||
) from err
|
||||
|
||||
if content_filter is None or item.children is None:
|
||||
return item
|
||||
|
||||
old_count = len(item.children)
|
||||
item.children = [
|
||||
child for child in item.children if child.can_expand or content_filter(child)
|
||||
]
|
||||
item.not_shown += old_count - len(item.children)
|
||||
return item
|
||||
|
||||
|
||||
@bind_hass
|
||||
async def async_resolve_media(
|
||||
hass: HomeAssistant,
|
||||
media_content_id: str,
|
||||
target_media_player: str | None | UndefinedType = UNDEFINED,
|
||||
) -> PlayMedia:
|
||||
"""Get info to play media."""
|
||||
if DOMAIN not in hass.data:
|
||||
raise Unresolvable("Media Source not loaded")
|
||||
|
||||
if target_media_player is UNDEFINED:
|
||||
report_usage(
|
||||
"calls media_source.async_resolve_media without passing an entity_id",
|
||||
exclude_integrations={DOMAIN},
|
||||
)
|
||||
target_media_player = None
|
||||
|
||||
try:
|
||||
item = _get_media_item(hass, media_content_id, target_media_player)
|
||||
except ValueError as err:
|
||||
raise Unresolvable(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="resolve_media_failed",
|
||||
translation_placeholders={
|
||||
"media_content_id": str(media_content_id),
|
||||
"error": str(err),
|
||||
},
|
||||
) from err
|
||||
|
||||
return await item.async_resolve()
|
79
homeassistant/components/media_source/http.py
Normal file
79
homeassistant/components/media_source/http.py
Normal file
@@ -0,0 +1,79 @@
|
||||
"""HTTP views and WebSocket commands for media sources."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import frontend, websocket_api
|
||||
from homeassistant.components.media_player import (
|
||||
ATTR_MEDIA_CONTENT_ID,
|
||||
CONTENT_AUTH_EXPIRY_TIME,
|
||||
BrowseError,
|
||||
async_process_play_media_url,
|
||||
)
|
||||
from homeassistant.components.websocket_api import ActiveConnection
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .error import Unresolvable
|
||||
from .helper import async_browse_media, async_resolve_media
|
||||
|
||||
|
||||
def async_setup(hass: HomeAssistant) -> None:
|
||||
"""Set up the HTTP views and WebSocket commands for media sources."""
|
||||
websocket_api.async_register_command(hass, websocket_browse_media)
|
||||
websocket_api.async_register_command(hass, websocket_resolve_media)
|
||||
frontend.async_register_built_in_panel(
|
||||
hass, "media-browser", "media_browser", "hass:play-box-multiple"
|
||||
)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "media_source/browse_media",
|
||||
vol.Optional(ATTR_MEDIA_CONTENT_ID, default=""): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def websocket_browse_media(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Browse available media."""
|
||||
try:
|
||||
media = await async_browse_media(hass, msg.get("media_content_id", ""))
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
media.as_dict(),
|
||||
)
|
||||
except BrowseError as err:
|
||||
connection.send_error(msg["id"], "browse_media_failed", str(err))
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "media_source/resolve_media",
|
||||
vol.Required(ATTR_MEDIA_CONTENT_ID): str,
|
||||
vol.Optional("expires", default=CONTENT_AUTH_EXPIRY_TIME): int,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def websocket_resolve_media(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Resolve media."""
|
||||
try:
|
||||
media = await async_resolve_media(hass, msg["media_content_id"], None)
|
||||
except Unresolvable as err:
|
||||
connection.send_error(msg["id"], "resolve_media_failed", str(err))
|
||||
return
|
||||
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
{
|
||||
"url": async_process_play_media_url(
|
||||
hass, media.url, allow_relative_url=True
|
||||
),
|
||||
"mime_type": media.mime_type,
|
||||
},
|
||||
)
|
@@ -21,6 +21,7 @@ from homeassistant.const import (
    PERCENTAGE,
    UV_INDEX,
    UnitOfLength,
    UnitOfPressure,
    UnitOfSpeed,
    UnitOfTemperature,
)
@@ -160,6 +161,16 @@ SENSOR_TYPES: tuple[MetOfficeSensorEntityDescription, ...] = (
        icon=None,
        entity_registry_enabled_default=False,
    ),
    MetOfficeSensorEntityDescription(
        key="pressure",
        native_attr_name="mslp",
        name="Pressure",
        device_class=SensorDeviceClass.PRESSURE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPressure.PA,
        suggested_unit_of_measurement=UnitOfPressure.HPA,
        entity_registry_enabled_default=False,
    ),
)

@@ -167,174 +167,259 @@ PROCESS_ACTIONS = {
|
||||
"stop_supercooling": MieleActions.STOP_SUPERCOOL,
|
||||
}
|
||||
|
||||
STATE_PROGRAM_PHASE_WASHING_MACHINE = {
|
||||
0: "not_running", # Returned by the API when the machine is switched off entirely.
|
||||
256: "not_running",
|
||||
257: "pre_wash",
|
||||
258: "soak",
|
||||
259: "pre_wash",
|
||||
260: "main_wash",
|
||||
261: "rinse",
|
||||
262: "rinse_hold",
|
||||
263: "cleaning",
|
||||
264: "cooling_down",
|
||||
265: "drain",
|
||||
266: "spin",
|
||||
267: "anti_crease",
|
||||
268: "finished",
|
||||
269: "venting",
|
||||
270: "starch_stop",
|
||||
271: "freshen_up_and_moisten",
|
||||
272: "steam_smoothing",
|
||||
279: "hygiene",
|
||||
280: "drying",
|
||||
285: "disinfecting",
|
||||
295: "steam_smoothing",
|
||||
65535: "not_running", # Seems to be default for some devices.
|
||||
}
|
||||
|
||||
STATE_PROGRAM_PHASE_TUMBLE_DRYER = {
|
||||
0: "not_running",
|
||||
512: "not_running",
|
||||
513: "program_running",
|
||||
514: "drying",
|
||||
515: "machine_iron",
|
||||
516: "hand_iron_2",
|
||||
517: "normal",
|
||||
518: "normal_plus",
|
||||
519: "cooling_down",
|
||||
520: "hand_iron_1",
|
||||
521: "anti_crease",
|
||||
522: "finished",
|
||||
523: "extra_dry",
|
||||
524: "hand_iron",
|
||||
526: "moisten",
|
||||
527: "thermo_spin",
|
||||
528: "timed_drying",
|
||||
529: "warm_air",
|
||||
530: "steam_smoothing",
|
||||
531: "comfort_cooling",
|
||||
532: "rinse_out_lint",
|
||||
533: "rinses",
|
||||
535: "not_running",
|
||||
534: "smoothing",
|
||||
536: "not_running",
|
||||
537: "not_running",
|
||||
538: "slightly_dry",
|
||||
539: "safety_cooling",
|
||||
65535: "not_running",
|
||||
}
|
||||
class ProgramPhaseWashingMachine(MieleEnum, missing_to_none=True):
|
||||
"""Program phase codes for washing machines."""
|
||||
|
||||
STATE_PROGRAM_PHASE_DISHWASHER = {
|
||||
1792: "not_running",
|
||||
1793: "reactivating",
|
||||
1794: "pre_dishwash",
|
||||
1795: "main_dishwash",
|
||||
1796: "rinse",
|
||||
1797: "interim_rinse",
|
||||
1798: "final_rinse",
|
||||
1799: "drying",
|
||||
1800: "finished",
|
||||
1801: "pre_dishwash",
|
||||
65535: "not_running",
|
||||
}
|
||||
not_running = 0, 256, 65535
|
||||
pre_wash = 257, 259
|
||||
soak = 258
|
||||
main_wash = 260
|
||||
rinse = 261
|
||||
rinse_hold = 262
|
||||
cleaning = 263
|
||||
cooling_down = 264
|
||||
drain = 265
|
||||
spin = 266
|
||||
anti_crease = 267
|
||||
finished = 268
|
||||
venting = 269
|
||||
starch_stop = 270
|
||||
freshen_up_and_moisten = 271
|
||||
steam_smoothing = 272, 295
|
||||
hygiene = 279
|
||||
drying = 280
|
||||
disinfecting = 285
|
||||
|
||||
STATE_PROGRAM_PHASE_OVEN = {
|
||||
0: "not_running",
|
||||
3073: "heating_up",
|
||||
3074: "process_running",
|
||||
3078: "process_finished",
|
||||
3084: "energy_save",
|
||||
65535: "not_running",
|
||||
}
|
||||
STATE_PROGRAM_PHASE_WARMING_DRAWER = {
|
||||
0: "not_running",
|
||||
3073: "heating_up",
|
||||
3075: "door_open",
|
||||
3094: "keeping_warm",
|
||||
3088: "cooling_down",
|
||||
65535: "not_running",
|
||||
}
|
||||
STATE_PROGRAM_PHASE_MICROWAVE = {
|
||||
0: "not_running",
|
||||
3329: "heating",
|
||||
3330: "process_running",
|
||||
3334: "process_finished",
|
||||
3340: "energy_save",
|
||||
65535: "not_running",
|
||||
}
|
||||
STATE_PROGRAM_PHASE_COFFEE_SYSTEM = {
|
||||
# Coffee system
|
||||
3073: "heating_up",
|
||||
4352: "not_running",
|
||||
4353: "espresso",
|
||||
4355: "milk_foam",
|
||||
4361: "dispensing",
|
||||
4369: "pre_brewing",
|
||||
4377: "grinding",
|
||||
4401: "2nd_grinding",
|
||||
4354: "hot_milk",
|
||||
4393: "2nd_pre_brewing",
|
||||
4385: "2nd_espresso",
|
||||
4404: "dispensing",
|
||||
4405: "rinse",
|
||||
65535: "not_running",
|
||||
}
|
||||
STATE_PROGRAM_PHASE_ROBOT_VACUUM_CLEANER = {
|
||||
0: "not_running",
|
||||
5889: "vacuum_cleaning",
|
||||
5890: "returning",
|
||||
5891: "vacuum_cleaning_paused",
|
||||
5892: "going_to_target_area",
|
||||
5893: "wheel_lifted", # F1
|
||||
5894: "dirty_sensors", # F2
|
||||
5895: "dust_box_missing", # F3
|
||||
5896: "blocked_drive_wheels", # F4
|
||||
5897: "blocked_brushes", # F5
|
||||
5898: "motor_overload", # F6
|
||||
5899: "internal_fault", # F7
|
||||
5900: "blocked_front_wheel", # F8
|
||||
5903: "docked",
|
||||
5904: "docked",
|
||||
5910: "remote_controlled",
|
||||
65535: "not_running",
|
||||
}
|
||||
STATE_PROGRAM_PHASE_STEAM_OVEN = {
|
||||
0: "not_running",
|
||||
3863: "steam_reduction",
|
||||
7938: "process_running",
|
||||
7939: "waiting_for_start",
|
||||
7940: "heating_up_phase",
|
||||
7942: "process_finished",
|
||||
65535: "not_running",
|
||||
}
|
||||
|
||||
STATE_PROGRAM_PHASE: dict[int, dict[int, str]] = {
|
||||
MieleAppliance.WASHING_MACHINE: STATE_PROGRAM_PHASE_WASHING_MACHINE,
|
||||
MieleAppliance.WASHING_MACHINE_SEMI_PROFESSIONAL: STATE_PROGRAM_PHASE_WASHING_MACHINE,
|
||||
MieleAppliance.WASHING_MACHINE_PROFESSIONAL: STATE_PROGRAM_PHASE_WASHING_MACHINE,
|
||||
MieleAppliance.TUMBLE_DRYER: STATE_PROGRAM_PHASE_TUMBLE_DRYER,
|
||||
MieleAppliance.DRYER_PROFESSIONAL: STATE_PROGRAM_PHASE_TUMBLE_DRYER,
|
||||
MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL: STATE_PROGRAM_PHASE_TUMBLE_DRYER,
|
||||
MieleAppliance.WASHER_DRYER: STATE_PROGRAM_PHASE_WASHING_MACHINE
|
||||
| STATE_PROGRAM_PHASE_TUMBLE_DRYER,
|
||||
MieleAppliance.DISHWASHER: STATE_PROGRAM_PHASE_DISHWASHER,
|
||||
MieleAppliance.DISHWASHER_SEMI_PROFESSIONAL: STATE_PROGRAM_PHASE_DISHWASHER,
|
||||
MieleAppliance.DISHWASHER_PROFESSIONAL: STATE_PROGRAM_PHASE_DISHWASHER,
|
||||
MieleAppliance.OVEN: STATE_PROGRAM_PHASE_OVEN,
|
||||
MieleAppliance.OVEN_MICROWAVE: STATE_PROGRAM_PHASE_MICROWAVE,
|
||||
MieleAppliance.STEAM_OVEN: STATE_PROGRAM_PHASE_STEAM_OVEN,
|
||||
MieleAppliance.STEAM_OVEN_COMBI: STATE_PROGRAM_PHASE_OVEN
|
||||
| STATE_PROGRAM_PHASE_STEAM_OVEN,
|
||||
MieleAppliance.STEAM_OVEN_MICRO: STATE_PROGRAM_PHASE_MICROWAVE
|
||||
| STATE_PROGRAM_PHASE_STEAM_OVEN,
|
||||
MieleAppliance.STEAM_OVEN_MK2: STATE_PROGRAM_PHASE_OVEN
|
||||
| STATE_PROGRAM_PHASE_STEAM_OVEN,
|
||||
MieleAppliance.DIALOG_OVEN: STATE_PROGRAM_PHASE_OVEN,
|
||||
MieleAppliance.MICROWAVE: STATE_PROGRAM_PHASE_MICROWAVE,
|
||||
MieleAppliance.COFFEE_SYSTEM: STATE_PROGRAM_PHASE_COFFEE_SYSTEM,
|
||||
MieleAppliance.ROBOT_VACUUM_CLEANER: STATE_PROGRAM_PHASE_ROBOT_VACUUM_CLEANER,
|
||||
MieleAppliance.DISH_WARMER: STATE_PROGRAM_PHASE_WARMING_DRAWER,
|
||||
class ProgramPhaseTumbleDryer(MieleEnum, missing_to_none=True):
|
||||
"""Program phase codes for tumble dryers."""
|
||||
|
||||
not_running = 0, 512, 535, 536, 537, 65535
|
||||
program_running = 513
|
||||
drying = 514
|
||||
machine_iron = 515
|
||||
hand_iron_2 = 516
|
||||
normal = 517
|
||||
normal_plus = 518
|
||||
cooling_down = 519
|
||||
hand_iron_1 = 520
|
||||
anti_crease = 521
|
||||
finished = 522
|
||||
extra_dry = 523
|
||||
hand_iron = 524
|
||||
moisten = 526
|
||||
thermo_spin = 527
|
||||
timed_drying = 528
|
||||
warm_air = 529
|
||||
steam_smoothing = 530
|
||||
comfort_cooling = 531
|
||||
rinse_out_lint = 532
|
||||
rinses = 533
|
||||
smoothing = 534
|
||||
slightly_dry = 538
|
||||
safety_cooling = 539
|
||||
|
||||
|
||||
class ProgramPhaseWasherDryer(MieleEnum, missing_to_none=True):
|
||||
"""Program phase codes for washer/dryer machines."""
|
||||
|
||||
not_running = 0, 256, 512, 535, 536, 537, 65535
|
||||
pre_wash = 257, 259
|
||||
soak = 258
|
||||
main_wash = 260
|
||||
rinse = 261
|
||||
rinse_hold = 262
|
||||
cleaning = 263
|
||||
cooling_down = 264, 519
|
||||
drain = 265
|
||||
spin = 266
|
||||
anti_crease = 267, 521
|
||||
finished = 268, 522
|
||||
venting = 269
|
||||
starch_stop = 270
|
||||
freshen_up_and_moisten = 271
|
||||
steam_smoothing = 272, 295, 530
|
||||
hygiene = 279
|
||||
drying = 280, 514
|
||||
disinfecting = 285
|
||||
|
||||
program_running = 513
|
||||
machine_iron = 515
|
||||
hand_iron_2 = 516
|
||||
normal = 517
|
||||
normal_plus = 518
|
||||
hand_iron_1 = 520
|
||||
extra_dry = 523
|
||||
hand_iron = 524
|
||||
moisten = 526
|
||||
thermo_spin = 527
|
||||
timed_drying = 528
|
||||
warm_air = 529
|
||||
comfort_cooling = 531
|
||||
rinse_out_lint = 532
|
||||
rinses = 533
|
||||
smoothing = 534
|
||||
slightly_dry = 538
|
||||
safety_cooling = 539
|
||||
|
||||
|
||||
class ProgramPhaseDishwasher(MieleEnum, missing_to_none=True):
|
||||
"""Program phase codes for dishwashers."""
|
||||
|
||||
not_running = 0, 1792, 65535
|
||||
reactivating = 1793
|
||||
pre_dishwash = 1794, 1801
|
||||
main_dishwash = 1795
|
||||
rinse = 1796
|
||||
interim_rinse = 1797
|
||||
final_rinse = 1798
|
||||
drying = 1799
|
||||
finished = 1800
|
||||
|
||||
|
||||
class ProgramPhaseOven(MieleEnum, missing_to_none=True):
|
||||
"""Program phase codes for ovens."""
|
||||
|
||||
not_running = 0, 65535
|
||||
heating_up = 3073
|
||||
process_running = 3074
|
||||
process_finished = 3078
|
||||
energy_save = 3084
|
||||
pre_heating = 3099
|
||||
|
||||
|
||||
class ProgramPhaseWarmingDrawer(MieleEnum, missing_to_none=True):
|
||||
"""Program phase codes for warming drawers."""
|
||||
|
||||
not_running = 0, 65535
|
||||
heating_up = 3073
|
||||
door_open = 3075
|
||||
keeping_warm = 3094
|
||||
cooling_down = 3088
|
||||
|
||||
|
||||
class ProgramPhaseMicrowave(MieleEnum, missing_to_none=True):
|
||||
"""Program phase for microwave units."""
|
||||
|
||||
not_running = 0, 65535
|
||||
heating = 3329
|
||||
process_running = 3330
|
||||
process_finished = 3334
|
||||
energy_save = 3340
|
||||
|
||||
|
||||
class ProgramPhaseCoffeeSystem(MieleEnum, missing_to_none=True):
|
||||
"""Program phase codes for coffee systems."""
|
||||
|
||||
not_running = 0, 4352, 65535
|
||||
heating_up = 3073
|
||||
espresso = 4353
|
||||
hot_milk = 4354
|
||||
milk_foam = 4355
|
||||
dispensing = 4361, 4404
|
||||
pre_brewing = 4369
|
||||
grinding = 4377
|
||||
second_espresso = 4385
|
||||
second_pre_brewing = 4393
|
||||
second_grinding = 4401
|
||||
rinse = 4405
|
||||
|
||||
|
||||
class ProgramPhaseRobotVacuumCleaner(MieleEnum, missing_to_none=True):
|
||||
"""Program phase codes for robot vacuum cleaner."""
|
||||
|
||||
not_running = 0, 65535
|
||||
vacuum_cleaning = 5889
|
||||
returning = 5890
|
||||
vacuum_cleaning_paused = 5891
|
||||
going_to_target_area = 5892
|
||||
wheel_lifted = 5893 # F1
|
||||
dirty_sensors = 5894 # F2
|
||||
dust_box_missing = 5895 # F3
|
||||
blocked_drive_wheels = 5896 # F4
|
||||
blocked_brushes = 5897 # F5
|
||||
motor_overload = 5898 # F6
|
||||
internal_fault = 5899 # F7
|
||||
blocked_front_wheel = 5900 # F8
|
||||
docked = 5903, 5904
|
||||
remote_controlled = 5910
|
||||
|
||||
|
||||
class ProgramPhaseMicrowaveOvenCombo(MieleEnum, missing_to_none=True):
|
||||
"""Program phase codes for microwave oven combo."""
|
||||
|
||||
not_running = 0, 65535
|
||||
steam_reduction = 3863
|
||||
process_running = 7938
|
||||
waiting_for_start = 7939
|
||||
heating_up_phase = 7940
|
||||
process_finished = 7942
|
||||
|
||||
|
||||
class ProgramPhaseSteamOven(MieleEnum, missing_to_none=True):
|
||||
"""Program phase codes for steam ovens."""
|
||||
|
||||
not_running = 0, 65535
|
||||
steam_reduction = 3863
|
||||
process_running = 7938
|
||||
waiting_for_start = 7939
|
||||
heating_up_phase = 7940
|
||||
process_finished = 7942
|
||||
|
||||
|
||||
class ProgramPhaseSteamOvenCombi(MieleEnum, missing_to_none=True):
|
||||
"""Program phase codes for steam oven combi."""
|
||||
|
||||
not_running = 0, 65535
|
||||
heating_up = 3073
|
||||
process_running = 3074, 7938
|
||||
process_finished = 3078, 7942
|
||||
energy_save = 3084
|
||||
pre_heating = 3099
|
||||
|
||||
steam_reduction = 3863
|
||||
waiting_for_start = 7939
|
||||
heating_up_phase = 7940
|
||||
|
||||
|
||||
class ProgramPhaseSteamOvenMicro(MieleEnum, missing_to_none=True):
|
||||
"""Program phase codes for steam oven micro."""
|
||||
|
||||
not_running = 0, 65535
|
||||
|
||||
heating = 3329
|
||||
process_running = 3330, 7938, 7942
|
||||
process_finished = 3334
|
||||
energy_save = 3340
|
||||
|
||||
steam_reduction = 3863
|
||||
waiting_for_start = 7939
|
||||
heating_up_phase = 7940
|
||||
|
||||
|
||||
PROGRAM_PHASE: dict[int, type[MieleEnum]] = {
|
||||
MieleAppliance.WASHING_MACHINE: ProgramPhaseWashingMachine,
|
||||
MieleAppliance.WASHING_MACHINE_SEMI_PROFESSIONAL: ProgramPhaseWashingMachine,
|
||||
MieleAppliance.WASHING_MACHINE_PROFESSIONAL: ProgramPhaseWashingMachine,
|
||||
MieleAppliance.TUMBLE_DRYER: ProgramPhaseTumbleDryer,
|
||||
MieleAppliance.DRYER_PROFESSIONAL: ProgramPhaseTumbleDryer,
|
||||
MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL: ProgramPhaseTumbleDryer,
|
||||
MieleAppliance.WASHER_DRYER: ProgramPhaseWasherDryer,
|
||||
MieleAppliance.DISHWASHER: ProgramPhaseDishwasher,
|
||||
MieleAppliance.DISHWASHER_SEMI_PROFESSIONAL: ProgramPhaseDishwasher,
|
||||
MieleAppliance.DISHWASHER_PROFESSIONAL: ProgramPhaseDishwasher,
|
||||
MieleAppliance.OVEN: ProgramPhaseOven,
|
||||
MieleAppliance.OVEN_MICROWAVE: ProgramPhaseMicrowaveOvenCombo,
|
||||
MieleAppliance.STEAM_OVEN: ProgramPhaseSteamOven,
|
||||
MieleAppliance.STEAM_OVEN_COMBI: ProgramPhaseSteamOvenCombi,
|
||||
MieleAppliance.STEAM_OVEN_MK2: ProgramPhaseSteamOvenCombi,
|
||||
MieleAppliance.STEAM_OVEN_MICRO: ProgramPhaseSteamOvenMicro,
|
||||
MieleAppliance.DIALOG_OVEN: ProgramPhaseOven,
|
||||
MieleAppliance.MICROWAVE: ProgramPhaseMicrowave,
|
||||
MieleAppliance.COFFEE_SYSTEM: ProgramPhaseCoffeeSystem,
|
||||
MieleAppliance.ROBOT_VACUUM_CLEANER: ProgramPhaseRobotVacuumCleaner,
|
||||
MieleAppliance.DISH_WARMER: ProgramPhaseWarmingDrawer,
|
||||
}
|
||||
|
||||
|
||||
|
@@ -35,8 +35,8 @@ from .const import (
|
||||
COFFEE_SYSTEM_PROFILE,
|
||||
DISABLED_TEMP_ENTITIES,
|
||||
DOMAIN,
|
||||
PROGRAM_PHASE,
|
||||
STATE_PROGRAM_ID,
|
||||
STATE_PROGRAM_PHASE,
|
||||
STATE_STATUS_TAGS,
|
||||
MieleAppliance,
|
||||
PlatePowerStep,
|
||||
@@ -851,29 +851,36 @@ class MieleStatusSensor(MieleSensor):
|
||||
return True
|
||||
|
||||
|
||||
# Some phases have names that are not valid python identifiers, so we need to translate
|
||||
# them in order to avoid breaking changes
|
||||
PROGRAM_PHASE_TRANSLATION = {
|
||||
"second_espresso": "2nd_espresso",
|
||||
"second_grinding": "2nd_grinding",
|
||||
"second_pre_brewing": "2nd_pre_brewing",
|
||||
}
|
||||
|
||||
|
||||
class MielePhaseSensor(MieleSensor):
|
||||
"""Representation of the program phase sensor."""
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state of the sensor."""
|
||||
ret_val = STATE_PROGRAM_PHASE.get(self.device.device_type, {}).get(
|
||||
"""Return the state of the phase sensor."""
|
||||
program_phase = PROGRAM_PHASE[self.device.device_type](
|
||||
self.device.state_program_phase
|
||||
).name
|
||||
|
||||
return (
|
||||
PROGRAM_PHASE_TRANSLATION.get(program_phase, program_phase)
|
||||
if program_phase is not None
|
||||
else None
|
||||
)
|
||||
if ret_val is None:
|
||||
_LOGGER.debug(
|
||||
"Unknown program phase: %s on device type: %s",
|
||||
self.device.state_program_phase,
|
||||
self.device.device_type,
|
||||
)
|
||||
return ret_val
|
||||
|
||||
@property
|
||||
def options(self) -> list[str]:
|
||||
"""Return the options list for the actual device type."""
|
||||
return sorted(
|
||||
set(STATE_PROGRAM_PHASE.get(self.device.device_type, {}).values())
|
||||
)
|
||||
phases = PROGRAM_PHASE[self.device.device_type].keys()
|
||||
return sorted([PROGRAM_PHASE_TRANSLATION.get(phase, phase) for phase in phases])
|
||||
|
||||
|
||||
class MieleProgramIdSensor(MieleSensor):
|
||||
|
@@ -291,6 +291,7 @@
          "not_running": "Not running",
          "pre_brewing": "Pre-brewing",
          "pre_dishwash": "Pre-cleaning",
          "pre_heating": "Pre-heating",
          "pre_wash": "Pre-wash",
          "process_finished": "Process finished",
          "process_running": "Process running",
@@ -29,7 +29,7 @@ from .const import (
|
||||
CONF_SLAVE_COUNT,
|
||||
CONF_VIRTUAL_COUNT,
|
||||
)
|
||||
from .entity import BasePlatform
|
||||
from .entity import ModbusBaseEntity
|
||||
from .modbus import ModbusHub
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
@@ -59,7 +59,7 @@ async def async_setup_platform(
|
||||
async_add_entities(sensors)
|
||||
|
||||
|
||||
class ModbusBinarySensor(BasePlatform, RestoreEntity, BinarySensorEntity):
|
||||
class ModbusBinarySensor(ModbusBaseEntity, RestoreEntity, BinarySensorEntity):
|
||||
"""Modbus binary sensor."""
|
||||
|
||||
def __init__(
|
||||
|
@@ -101,7 +101,7 @@ from .const import (
    CONF_WRITE_REGISTERS,
    DataType,
)
from .entity import BaseStructPlatform
from .entity import ModbusStructEntity
from .modbus import ModbusHub

PARALLEL_UPDATES = 1
@@ -131,7 +131,7 @@ async def async_setup_platform(
    async_add_entities(ModbusThermostat(hass, hub, config) for config in climates)


class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity):
class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
    """Representation of a Modbus Thermostat."""

    _attr_supported_features = (
@@ -23,7 +23,7 @@ from .const import (
    CONF_STATUS_REGISTER,
    CONF_STATUS_REGISTER_TYPE,
)
from .entity import BasePlatform
from .entity import ModbusBaseEntity
from .modbus import ModbusHub

PARALLEL_UPDATES = 1
@@ -42,7 +42,7 @@ async def async_setup_platform(
    async_add_entities(ModbusCover(hass, hub, config) for config in covers)


class ModbusCover(BasePlatform, CoverEntity, RestoreEntity):
class ModbusCover(ModbusBaseEntity, CoverEntity, RestoreEntity):
    """Representation of a Modbus cover."""

    _attr_supported_features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
@@ -68,7 +68,7 @@ from .const import (
from .modbus import ModbusHub


class BasePlatform(Entity):
class ModbusBaseEntity(Entity):
    """Base for readonly platforms."""

    _value: str | None = None
@@ -154,7 +154,7 @@ class BasePlatform(Entity):
        )


class BaseStructPlatform(BasePlatform, RestoreEntity):
class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
    """Base class representing a sensor/climate."""

    def __init__(self, hass: HomeAssistant, hub: ModbusHub, config: dict) -> None:
@@ -261,7 +261,7 @@ class BaseStructPlatform(BasePlatform, RestoreEntity):
        return self.__process_raw_value(val[0])


class BaseSwitch(BasePlatform, ToggleEntity, RestoreEntity):
class ModbusToggleEntity(ModbusBaseEntity, ToggleEntity, RestoreEntity):
    """Base class representing a Modbus switch."""

    def __init__(self, hass: HomeAssistant, hub: ModbusHub, config: dict) -> None:
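For orientation across the per-platform Modbus hunks that follow, a condensed sketch of the renamed base-class hierarchy and how the platform entities pick it up; bodies are elided, so this is a summary of the rename in the diff, not the integration's code verbatim:

from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.fan import FanEntity
from homeassistant.helpers.entity import Entity, ToggleEntity
from homeassistant.helpers.restore_state import RestoreEntity


class ModbusBaseEntity(Entity):  # was BasePlatform
    """Read-only base entity."""


class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):  # was BaseStructPlatform
    """Base for struct-decoding entities (sensor, climate)."""


class ModbusToggleEntity(ModbusBaseEntity, ToggleEntity, RestoreEntity):  # was BaseSwitch
    """Base for switch-like entities (switch, fan, light)."""


# The platform classes in the hunks below then subclass the renamed bases, e.g.:
class ModbusBinarySensor(ModbusBaseEntity, RestoreEntity, BinarySensorEntity): ...
class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity): ...
class ModbusFan(ModbusToggleEntity, FanEntity): ...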
@@ -12,7 +12,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import get_hub
from .const import CONF_FANS
from .entity import BaseSwitch
from .entity import ModbusToggleEntity
from .modbus import ModbusHub

PARALLEL_UPDATES = 1
@@ -31,7 +31,7 @@ async def async_setup_platform(
    async_add_entities(ModbusFan(hass, hub, config) for config in fans)


class ModbusFan(BaseSwitch, FanEntity):
class ModbusFan(ModbusToggleEntity, FanEntity):
    """Class representing a Modbus fan."""

    def __init__(
@@ -30,7 +30,7 @@ from .const import (
    LIGHT_MODBUS_SCALE_MAX,
    LIGHT_MODBUS_SCALE_MIN,
)
from .entity import BaseSwitch
from .entity import ModbusToggleEntity
from .modbus import ModbusHub

PARALLEL_UPDATES = 1
@@ -49,7 +49,7 @@ async def async_setup_platform(
    async_add_entities(ModbusLight(hass, hub, config) for config in lights)


class ModbusLight(BaseSwitch, LightEntity):
class ModbusLight(ModbusToggleEntity, LightEntity):
    """Class representing a Modbus light."""

    def __init__(
@@ -26,7 +26,7 @@ from homeassistant.helpers.update_coordinator import (

from . import get_hub
from .const import _LOGGER, CONF_SLAVE_COUNT, CONF_VIRTUAL_COUNT
from .entity import BaseStructPlatform
from .entity import ModbusStructEntity
from .modbus import ModbusHub

PARALLEL_UPDATES = 1
@@ -56,7 +56,7 @@ async def async_setup_platform(
    async_add_entities(sensors)


class ModbusRegisterSensor(BaseStructPlatform, RestoreSensor, SensorEntity):
class ModbusRegisterSensor(ModbusStructEntity, RestoreSensor, SensorEntity):
    """Modbus register sensor."""

    def __init__(
@@ -11,7 +11,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import get_hub
from .entity import BaseSwitch
from .entity import ModbusToggleEntity

PARALLEL_UPDATES = 1

@@ -29,7 +29,7 @@ async def async_setup_platform(
    async_add_entities(ModbusSwitch(hass, hub, config) for config in switches)


class ModbusSwitch(BaseSwitch, SwitchEntity):
class ModbusSwitch(ModbusToggleEntity, SwitchEntity):
    """Base class representing a Modbus switch."""

    async def async_turn_on(self, **kwargs: Any) -> None:
@@ -89,7 +89,7 @@ async def async_setup_entry(
        elif service_call.service == SERVICE_RESTORE:
            entity.restore()

    @service.verify_domain_control(hass, DOMAIN)
    @service.verify_domain_control(DOMAIN)
    async def async_service_handle(service_call: core.ServiceCall) -> None:
        """Handle for services."""
        entities = await platform.async_extract_from_service(service_call)
@@ -1209,7 +1209,6 @@ PLATFORM_ENTITY_FIELDS: dict[str, dict[str, PlatformField]] = {
|
||||
default=lambda config: bool(config.get(CONF_DIRECTION_COMMAND_TOPIC)),
|
||||
),
|
||||
},
|
||||
Platform.NOTIFY.value: {},
|
||||
Platform.LIGHT.value: {
|
||||
CONF_SCHEMA: PlatformField(
|
||||
selector=LIGHT_SCHEMA_SELECTOR,
|
||||
@@ -1225,6 +1224,7 @@ PLATFORM_ENTITY_FIELDS: dict[str, dict[str, PlatformField]] = {
|
||||
),
|
||||
},
|
||||
Platform.LOCK.value: {},
|
||||
Platform.NOTIFY.value: {},
|
||||
Platform.SENSOR.value: {
|
||||
CONF_DEVICE_CLASS: PlatformField(
|
||||
selector=SENSOR_DEVICE_CLASS_SELECTOR, required=False
|
||||
|
@@ -321,11 +321,11 @@
|
||||
"code_arm_required": "Code arm required",
|
||||
"code_disarm_required": "Code disarm required",
|
||||
"code_trigger_required": "Code trigger required",
|
||||
"color_temp_template": "Color temperature template",
|
||||
"command_template": "Command template",
|
||||
"command_topic": "Command topic",
|
||||
"command_off_template": "Command \"off\" template",
|
||||
"command_on_template": "Command \"on\" template",
|
||||
"color_temp_template": "Color temperature template",
|
||||
"force_update": "Force update",
|
||||
"green_template": "Green template",
|
||||
"last_reset_value_template": "Last reset value template",
|
||||
@@ -358,11 +358,11 @@
|
||||
"code_arm_required": "If set, the code is required to arm the alarm. If not set, the code is not validated.",
|
||||
"code_disarm_required": "If set, the code is required to disarm the alarm. If not set, the code is not validated.",
|
||||
"code_trigger_required": "If set, the code is required to manually trigger the alarm. If not set, the code is not validated.",
|
||||
"color_temp_template": "[Template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract color temperature in Kelvin from the state payload value. Expected result of the template is an integer.",
|
||||
"command_off_template": "The [template](https://www.home-assistant.io/docs/configuration/templating/#using-command-templates-with-mqtt) for \"off\" state changes. Available variables are: `state` and `transition`.",
|
||||
"command_on_template": "The [template](https://www.home-assistant.io/docs/configuration/templating/#using-command-templates-with-mqtt) for \"on\" state changes. Available variables: `state`, `brightness`, `color_temp`, `red`, `green`, `blue`, `hue`, `sat`, `flash`, `transition` and `effect`. Values `red`, `green`, `blue` and `brightness` are provided as integers from range 0-255. Value of `hue` is provided as float from range 0-360. Value of `sat` is provided as float from range 0-100. Value of `color_temp` is provided as integer representing Kelvin units.",
|
||||
"command_template": "A [template](https://www.home-assistant.io/docs/configuration/templating/#using-command-templates-with-mqtt) to render the payload to be published at the command topic. [Learn more.]({url}#command_template)",
|
||||
"command_topic": "The publishing topic that will be used to control the {platform} entity. [Learn more.]({url}#command_topic)",
|
||||
"color_temp_template": "[Template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract color temperature in Kelvin from the state payload value. Expected result of the template is an integer.",
|
||||
"force_update": "Sends update events even if the value hasn’t changed. Useful if you want to have meaningful value graphs in history. [Learn more.]({url}#force_update)",
|
||||
"green_template": "[Template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract green color from the state payload value. Expected result of the template is an integer from 0-255 range.",
|
||||
"last_reset_value_template": "Defines a [template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract the last reset. When Last reset template is set, the State class option must be Total. [Learn more.]({url}#last_reset_value_template)",
|
||||
@@ -1261,6 +1261,12 @@
|
||||
"diagnostic": "Diagnostic"
|
||||
}
|
||||
},
|
||||
"image_processing_mode": {
|
||||
"options": {
|
||||
"image_data": "Image data is received",
|
||||
"image_url": "Image URL is received"
|
||||
}
|
||||
},
|
||||
"light_schema": {
|
||||
"options": {
|
||||
"basic": "Default schema",
|
||||
|
@@ -143,7 +143,7 @@ async def build_main_listing(hass: HomeAssistant) -> BrowseMedia:
            children.extend(item.children)
        else:
            children.append(item)
    except media_source.BrowseError:
    except BrowseError:
        pass

    return BrowseMedia(
@@ -18,9 +18,6 @@
    },
    "sps30_caqi_level": {
      "default": "mdi:air-filter"
    },
    "sps30_pm4": {
      "default": "mdi:molecule"
    }
  }
}
@@ -324,6 +324,7 @@ SENSORS: tuple[NAMSensorEntityDescription, ...] = (
        translation_key="sps30_pm4",
        suggested_display_precision=0,
        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        device_class=SensorDeviceClass.PM4,
        state_class=SensorStateClass.MEASUREMENT,
        value=lambda sensors: sensors.sps30_p4,
    ),
@@ -6,5 +6,5 @@
  "iot_class": "local_push",
  "loggers": ["nessclient"],
  "quality_scale": "legacy",
  "requirements": ["nessclient==1.2.0"]
  "requirements": ["nessclient==1.3.1"]
}
@@ -33,7 +33,7 @@ async def async_setup_entry(
    await cleanup_device(hass, config_entry)

    coordinator = NordPoolDataUpdateCoordinator(hass, config_entry)
    await coordinator.fetch_data(dt_util.utcnow())
    await coordinator.fetch_data(dt_util.utcnow(), True)
    if not coordinator.last_update_success:
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
@@ -13,7 +13,6 @@ from pynordpool import (
    DeliveryPeriodEntry,
    DeliveryPeriodsData,
    NordPoolClient,
    NordPoolEmptyResponseError,
    NordPoolError,
    NordPoolResponseError,
)
@@ -22,7 +21,7 @@ from homeassistant.const import CONF_CURRENCY
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util

from .const import CONF_AREAS, DOMAIN, LOGGER
@@ -67,14 +66,26 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
            self.unsub()
            self.unsub = None

    async def fetch_data(self, now: datetime) -> None:
    async def fetch_data(self, now: datetime, initial: bool = False) -> None:
        """Fetch data from Nord Pool."""
        self.unsub = async_track_point_in_utc_time(
            self.hass, self.fetch_data, self.get_next_interval(dt_util.utcnow())
        )
        data = await self.api_call()
        if data and data.entries:
            self.async_set_updated_data(data)
            current_day = dt_util.utcnow().strftime("%Y-%m-%d")
            for entry in data.entries:
                if entry.requested_date == current_day:
                    LOGGER.debug("Data for current day found")
                    self.async_set_updated_data(data)
                    return
        if data and not data.entries and not initial:
            # Empty response, use cache
            LOGGER.debug("No data entries received")
            return
        self.async_set_update_error(
            UpdateFailed(translation_domain=DOMAIN, translation_key="no_day_data")
        )

    async def api_call(self, retry: int = 3) -> DeliveryPeriodsData | None:
        """Make api call to retrieve data with retry if failure."""
@@ -96,16 +107,16 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
            aiohttp.ClientError,
        ) as error:
            LOGGER.debug("Connection error: %s", error)
            self.async_set_update_error(error)
            if self.data is None:
                self.async_set_update_error(  # type: ignore[unreachable]
                    UpdateFailed(
                        translation_domain=DOMAIN,
                        translation_key="could_not_fetch_data",
                        translation_placeholders={"error": str(error)},
                    )
                )
            return self.data

        if data:
            current_day = dt_util.utcnow().strftime("%Y-%m-%d")
            for entry in data.entries:
                if entry.requested_date == current_day:
                    LOGGER.debug("Data for current day found")
                    return data

            self.async_set_update_error(NordPoolEmptyResponseError("No current day data"))
        return data

    def merge_price_entries(self) -> list[DeliveryPeriodEntry]:
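A standalone sketch of the current-day check the coordinator now applies before accepting a response; DeliveryEntry and the sample dates below are illustrative stand-ins for the pynordpool objects:

from dataclasses import dataclass
from datetime import datetime, timezone


@dataclass
class DeliveryEntry:
    """Illustrative stand-in for a pynordpool delivery-period entry."""

    requested_date: str


def has_current_day(entries: list[DeliveryEntry]) -> bool:
    """Return True if any delivery-period entry covers today's UTC date."""
    current_day = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    return any(entry.requested_date == current_day for entry in entries)


today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
print(has_current_day([DeliveryEntry("2020-01-01"), DeliveryEntry(today)]))  # True
print(has_current_day([DeliveryEntry("2020-01-01")]))  # False -> cache or UpdateFailed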
@@ -157,6 +157,12 @@
    },
    "connection_error": {
      "message": "There was a connection error connecting to the API. Try again later."
    },
    "no_day_data": {
      "message": "Data for current day is missing"
    },
    "could_not_fetch_data": {
      "message": "Data could not be retrieved: {error}"
    }
  }
}
@@ -291,6 +291,12 @@ class NumberDeviceClass(StrEnum):
|
||||
Unit of measurement: `μg/m³`
|
||||
"""
|
||||
|
||||
PM4 = "pm4"
|
||||
"""Particulate matter <= 4 μm.
|
||||
|
||||
Unit of measurement: `μg/m³`
|
||||
"""
|
||||
|
||||
POWER_FACTOR = "power_factor"
|
||||
"""Power factor.
|
||||
|
||||
@@ -510,6 +516,7 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
|
||||
NumberDeviceClass.PM1: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
|
||||
NumberDeviceClass.PM10: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
|
||||
NumberDeviceClass.PM25: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
|
||||
NumberDeviceClass.PM4: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
|
||||
NumberDeviceClass.POWER_FACTOR: {PERCENTAGE, None},
|
||||
NumberDeviceClass.POWER: {
|
||||
UnitOfPower.MILLIWATT,
|
||||
|
@@ -12,6 +12,7 @@ import voluptuous as vol
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
@@ -25,10 +26,77 @@ SCHEMA_DEVICE = vol.Schema(
|
||||
|
||||
|
||||
class PooldoseConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Seko Pooldose."""
|
||||
"""Config flow for the Pooldose integration including DHCP discovery."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow and store the discovered IP address."""
|
||||
super().__init__()
|
||||
self._discovered_ip: str | None = None
|
||||
|
||||
async def _validate_host(
|
||||
self, host: str
|
||||
) -> tuple[str | None, dict[str, str] | None, dict[str, str] | None]:
|
||||
"""Validate the host and return (serial_number, api_versions, errors)."""
|
||||
client = PooldoseClient(host)
|
||||
client_status = await client.connect()
|
||||
if client_status == RequestStatus.HOST_UNREACHABLE:
|
||||
return None, None, {"base": "cannot_connect"}
|
||||
if client_status == RequestStatus.PARAMS_FETCH_FAILED:
|
||||
return None, None, {"base": "params_fetch_failed"}
|
||||
if client_status != RequestStatus.SUCCESS:
|
||||
return None, None, {"base": "cannot_connect"}
|
||||
|
||||
api_status, api_versions = client.check_apiversion_supported()
|
||||
if api_status == RequestStatus.NO_DATA:
|
||||
return None, None, {"base": "api_not_set"}
|
||||
if api_status == RequestStatus.API_VERSION_UNSUPPORTED:
|
||||
return None, api_versions, {"base": "api_not_supported"}
|
||||
|
||||
device_info = client.device_info
|
||||
if not device_info:
|
||||
return None, None, {"base": "no_device_info"}
|
||||
serial_number = device_info.get("SERIAL_NUMBER")
|
||||
if not serial_number:
|
||||
return None, None, {"base": "no_serial_number"}
|
||||
|
||||
return serial_number, None, None
|
||||
|
||||
async def async_step_dhcp(
|
||||
self, discovery_info: DhcpServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle DHCP discovery: validate device and update IP if needed."""
|
||||
serial_number, _, _ = await self._validate_host(discovery_info.ip)
|
||||
if not serial_number:
|
||||
return self.async_abort(reason="no_serial_number")
|
||||
|
||||
await self.async_set_unique_id(serial_number)
|
||||
|
||||
# Conditionally update IP and abort if entry exists
|
||||
self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.ip})
|
||||
|
||||
# Continue with new device flow
|
||||
self._discovered_ip = discovery_info.ip
|
||||
return self.async_show_form(
|
||||
step_id="dhcp_confirm",
|
||||
description_placeholders={
|
||||
"ip": discovery_info.ip,
|
||||
"mac": discovery_info.macaddress,
|
||||
"name": f"PoolDose {serial_number}",
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_dhcp_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Create the entry after the confirmation dialog."""
|
||||
discovered_ip = self._discovered_ip
|
||||
return self.async_create_entry(
|
||||
title=f"PoolDose {self.unique_id}",
|
||||
data={CONF_HOST: discovered_ip},
|
||||
)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -40,58 +108,16 @@ class PooldoseConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
host = user_input[CONF_HOST]
|
||||
client = PooldoseClient(host)
|
||||
client_status = await client.connect()
|
||||
if client_status == RequestStatus.HOST_UNREACHABLE:
|
||||
serial_number, api_versions, errors = await self._validate_host(host)
|
||||
if errors:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=SCHEMA_DEVICE,
|
||||
errors={"base": "cannot_connect"},
|
||||
)
|
||||
if client_status == RequestStatus.PARAMS_FETCH_FAILED:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=SCHEMA_DEVICE,
|
||||
errors={"base": "params_fetch_failed"},
|
||||
)
|
||||
if client_status != RequestStatus.SUCCESS:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=SCHEMA_DEVICE,
|
||||
errors={"base": "cannot_connect"},
|
||||
)
|
||||
|
||||
api_status, api_versions = client.check_apiversion_supported()
|
||||
if api_status == RequestStatus.NO_DATA:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=SCHEMA_DEVICE,
|
||||
errors={"base": "api_not_set"},
|
||||
)
|
||||
if api_status == RequestStatus.API_VERSION_UNSUPPORTED:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=SCHEMA_DEVICE,
|
||||
errors={"base": "api_not_supported"},
|
||||
errors=errors,
|
||||
description_placeholders=api_versions,
|
||||
)
|
||||
|
||||
device_info = client.device_info
|
||||
if not device_info:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=SCHEMA_DEVICE,
|
||||
errors={"base": "no_device_info"},
|
||||
)
|
||||
serial_number = device_info.get("SERIAL_NUMBER")
|
||||
if not serial_number:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=SCHEMA_DEVICE,
|
||||
errors={"base": "no_serial_number"},
|
||||
)
|
||||
|
||||
await self.async_set_unique_id(serial_number)
|
||||
await self.async_set_unique_id(serial_number, raise_on_progress=False)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=f"PoolDose {serial_number}",
|
||||
|
@@ -3,6 +3,11 @@
  "name": "SEKO PoolDose",
  "codeowners": ["@lmaertin"],
  "config_flow": true,
  "dhcp": [
    {
      "hostname": "kommspot"
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/pooldose",
  "iot_class": "local_polling",
  "quality_scale": "bronze",
@@ -44,12 +44,8 @@ rules:
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: todo
|
||||
comment: DHCP discovery is possible
|
||||
discovery:
|
||||
status: todo
|
||||
comment: DHCP discovery is possible
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
|
@@ -10,6 +10,10 @@
|
||||
"data_description": {
|
||||
"host": "IP address or hostname of your device"
|
||||
}
|
||||
},
|
||||
"dhcp_confirm": {
|
||||
"title": "Confirm DHCP discovered PoolDose device",
|
||||
"description": "A PoolDose device was found on your network at {ip} with MAC address {mac}.\n\nDo you want to add {name} to Home Assistant?"
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
@@ -22,7 +26,10 @@
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"no_device_info": "Unable to retrieve device information",
|
||||
"no_serial_number": "No serial number found on the device"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
|
@@ -4,7 +4,6 @@ from homeassistant.const import CONF_API_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from .const import ATTR_POWER, ATTR_POWER_P3
|
||||
from .coordinator import ElecPricesDataUpdateCoordinator, PVPCConfigEntry
|
||||
from .helpers import get_enabled_sensor_keys
|
||||
|
||||
@@ -23,23 +22,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: PVPCConfigEntry) -> bool
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(entry.add_update_listener(async_update_options))
|
||||
return True
|
||||
|
||||
|
||||
async def async_update_options(hass: HomeAssistant, entry: PVPCConfigEntry) -> None:
|
||||
"""Handle options update."""
|
||||
if any(
|
||||
entry.data.get(attrib) != entry.options.get(attrib)
|
||||
for attrib in (ATTR_POWER, ATTR_POWER_P3, CONF_API_TOKEN)
|
||||
):
|
||||
# update entry replacing data with new options
|
||||
hass.config_entries.async_update_entry(
|
||||
entry, data={**entry.data, **entry.options}
|
||||
)
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: PVPCConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
@@ -13,7 +13,7 @@ from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
OptionsFlowWithReload,
|
||||
)
|
||||
from homeassistant.const import CONF_API_TOKEN, CONF_NAME
|
||||
from homeassistant.core import callback
|
||||
@@ -178,7 +178,7 @@ class TariffSelectorConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_show_form(step_id="reauth_confirm", data_schema=data_schema)
|
||||
|
||||
|
||||
class PVPCOptionsFlowHandler(OptionsFlow):
|
||||
class PVPCOptionsFlowHandler(OptionsFlowWithReload):
|
||||
"""Handle PVPC options."""
|
||||
|
||||
_power: float | None = None
|
||||
|
@@ -29,13 +29,16 @@ class ElecPricesDataUpdateCoordinator(DataUpdateCoordinator[EsiosApiData]):
        self, hass: HomeAssistant, entry: PVPCConfigEntry, sensor_keys: set[str]
    ) -> None:
        """Initialize."""
        config = entry.data.copy()
        config.update({attr: value for attr, value in entry.options.items() if value})

        self.api = PVPCData(
            session=async_get_clientsession(hass),
            tariff=entry.data[ATTR_TARIFF],
            tariff=config[ATTR_TARIFF],
            local_timezone=hass.config.time_zone,
            power=entry.data[ATTR_POWER],
            power_valley=entry.data[ATTR_POWER_P3],
            api_token=entry.data.get(CONF_API_TOKEN),
            power=config[ATTR_POWER],
            power_valley=config[ATTR_POWER_P3],
            api_token=config.get(CONF_API_TOKEN),
            sensor_keys=tuple(sensor_keys),
        )
        super().__init__(
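A standalone sketch of the data/options merge introduced above, with made-up keys and values; options override the stored entry data only when they are truthy:

entry_data = {"tariff": "2.0TD", "power": 4.6, "power_p3": 3.45, "api_token": None}
entry_options = {"power": 5.75, "api_token": ""}  # empty or None values are ignored

config = entry_data.copy()
config.update({attr: value for attr, value in entry_options.items() if value})

assert config["power"] == 5.75      # truthy option overrides the stored data
assert config["api_token"] is None  # falsy option value is skipped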
@@ -256,6 +256,7 @@ async def setup_device_v1(
|
||||
RoborockMqttClientV1, user_data, DeviceData(device, product_info.model)
|
||||
)
|
||||
try:
|
||||
await mqtt_client.async_connect()
|
||||
networking = await mqtt_client.get_networking()
|
||||
if networking is None:
|
||||
# If the api does not return an error but does return None for
|
||||
@@ -319,8 +320,11 @@ async def setup_device_a01(
|
||||
product_info: HomeDataProduct,
|
||||
) -> RoborockDataUpdateCoordinatorA01 | None:
|
||||
"""Set up a A01 protocol device."""
|
||||
mqtt_client = RoborockMqttClientA01(
|
||||
user_data, DeviceData(device, product_info.name), product_info.category
|
||||
mqtt_client = await hass.async_add_executor_job(
|
||||
RoborockMqttClientA01,
|
||||
user_data,
|
||||
DeviceData(device, product_info.model),
|
||||
product_info.category,
|
||||
)
|
||||
coord = RoborockDataUpdateCoordinatorA01(
|
||||
hass, entry, device, product_info, mqtt_client
|
||||
|
@@ -272,6 +272,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
"""Verify that the api is reachable. If it is not, switch clients."""
|
||||
if isinstance(self.api, RoborockLocalClientV1):
|
||||
try:
|
||||
await self.api.async_connect()
|
||||
await self.api.ping()
|
||||
except RoborockException:
|
||||
_LOGGER.warning(
|
||||
|
@@ -142,7 +142,7 @@ async def root_payload(
|
||||
children.extend(browse_item.children)
|
||||
else:
|
||||
children.append(browse_item)
|
||||
except media_source.BrowseError:
|
||||
except BrowseError:
|
||||
pass
|
||||
|
||||
if len(children) == 1:
|
||||
|
homeassistant/components/satel_integra/diagnostics.py (new file, 26 lines)
@@ -0,0 +1,26 @@
"""Diagnostics support for Satel Integra."""

from __future__ import annotations

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_CODE
from homeassistant.core import HomeAssistant

TO_REDACT = {CONF_CODE}


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: ConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for the config entry."""
    diag: dict[str, Any] = {}

    diag["config_entry_data"] = dict(entry.data)
    diag["config_entry_options"] = async_redact_data(entry.options, TO_REDACT)

    diag["subentries"] = dict(entry.subentries)

    return diag
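A quick illustration of what the redaction above produces; the option values are made up and only the code key is in TO_REDACT:

from homeassistant.components.diagnostics import async_redact_data

options = {"code": "1234", "arm_home_mode": 1}  # sample values, not real config
print(async_redact_data(options, {"code"}))
# {'code': '**REDACTED**', 'arm_home_mode': 1}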
@@ -114,7 +114,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ScrapeConfigEntry) -> bo
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
|
||||
return True
|
||||
|
||||
@@ -124,11 +123,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Handle options update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_remove_config_entry_device(
|
||||
hass: HomeAssistant, entry: ConfigEntry, device: DeviceEntry
|
||||
) -> bool:
|
||||
|
@@ -308,6 +308,7 @@ class ScrapeConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
|
||||
config_flow = CONFIG_FLOW
|
||||
options_flow = OPTIONS_FLOW
|
||||
options_flow_reloads = True
|
||||
|
||||
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
|
||||
"""Return config entry title."""
|
||||
|
@@ -862,16 +862,25 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
        """Return a custom unit, or UNDEFINED if not compatible with the native unit."""
        assert self.registry_entry
        if (
            (sensor_options := self.registry_entry.options.get(primary_key))
            and secondary_key in sensor_options
            and (device_class := self.device_class) in UNIT_CONVERTERS
            and self.__native_unit_of_measurement_compat
            in UNIT_CONVERTERS[device_class].VALID_UNITS
            and (custom_unit := sensor_options[secondary_key])
            in UNIT_CONVERTERS[device_class].VALID_UNITS
            sensor_options := self.registry_entry.options.get(primary_key)
        ) is None or secondary_key not in sensor_options:
            return UNDEFINED

        if (device_class := self.device_class) not in UNIT_CONVERTERS:
            return UNDEFINED

        if (
            self.__native_unit_of_measurement_compat
            not in UNIT_CONVERTERS[device_class].VALID_UNITS
        ):
            return cast(str, custom_unit)
            return UNDEFINED
            return UNDEFINED

        if (custom_unit := sensor_options[secondary_key]) not in UNIT_CONVERTERS[
            device_class
        ].VALID_UNITS:
            return UNDEFINED

        return cast(str, custom_unit)

    @callback
    def async_registry_entry_updated(self) -> None:
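A standalone sketch of the guard-clause flow in the refactor above, with UNDEFINED stood in by None and a toy table in place of UNIT_CONVERTERS; the option key name is an illustrative stand-in for the secondary_key parameter:

TOY_CONVERTERS = {"temperature": {"°C", "°F", "K"}}


def resolve_custom_unit(
    sensor_options: dict | None,
    device_class: str | None,
    native_unit: str | None,
    option_key: str = "unit_of_measurement",
) -> str | None:
    """Return the requested custom unit, or None when any compatibility check fails."""
    if sensor_options is None or option_key not in sensor_options:
        return None
    if device_class not in TOY_CONVERTERS:
        return None
    if native_unit not in TOY_CONVERTERS[device_class]:
        return None
    if (custom_unit := sensor_options[option_key]) not in TOY_CONVERTERS[device_class]:
        return None
    return custom_unit


print(resolve_custom_unit({"unit_of_measurement": "°F"}, "temperature", "°C"))  # °F
print(resolve_custom_unit({"unit_of_measurement": "mm"}, "temperature", "°C"))  # None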
@@ -326,6 +326,12 @@ class SensorDeviceClass(StrEnum):
|
||||
Unit of measurement: `μg/m³`
|
||||
"""
|
||||
|
||||
PM4 = "pm4"
|
||||
"""Particulate matter <= 4 μm.
|
||||
|
||||
Unit of measurement: `μg/m³`
|
||||
"""
|
||||
|
||||
POWER_FACTOR = "power_factor"
|
||||
"""Power factor.
|
||||
|
||||
@@ -621,6 +627,7 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = {
|
||||
SensorDeviceClass.PM1: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
|
||||
SensorDeviceClass.PM10: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
|
||||
SensorDeviceClass.PM25: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
|
||||
SensorDeviceClass.PM4: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
|
||||
SensorDeviceClass.POWER_FACTOR: {PERCENTAGE, None},
|
||||
SensorDeviceClass.POWER: {
|
||||
UnitOfPower.MILLIWATT,
|
||||
@@ -755,6 +762,7 @@ DEVICE_CLASS_STATE_CLASSES: dict[SensorDeviceClass, set[SensorStateClass]] = {
|
||||
SensorDeviceClass.PM1: {SensorStateClass.MEASUREMENT},
|
||||
SensorDeviceClass.PM10: {SensorStateClass.MEASUREMENT},
|
||||
SensorDeviceClass.PM25: {SensorStateClass.MEASUREMENT},
|
||||
SensorDeviceClass.PM4: {SensorStateClass.MEASUREMENT},
|
||||
SensorDeviceClass.POWER_FACTOR: {SensorStateClass.MEASUREMENT},
|
||||
SensorDeviceClass.POWER: {SensorStateClass.MEASUREMENT},
|
||||
SensorDeviceClass.PRECIPITATION: set(SensorStateClass),
|
||||
|
@@ -65,6 +65,7 @@ CONF_IS_PH = "is_ph"
|
||||
CONF_IS_PM1 = "is_pm1"
|
||||
CONF_IS_PM10 = "is_pm10"
|
||||
CONF_IS_PM25 = "is_pm25"
|
||||
CONF_IS_PM4 = "is_pm4"
|
||||
CONF_IS_POWER = "is_power"
|
||||
CONF_IS_POWER_FACTOR = "is_power_factor"
|
||||
CONF_IS_PRECIPITATION = "is_precipitation"
|
||||
@@ -126,6 +127,7 @@ ENTITY_CONDITIONS = {
|
||||
SensorDeviceClass.PM1: [{CONF_TYPE: CONF_IS_PM1}],
|
||||
SensorDeviceClass.PM10: [{CONF_TYPE: CONF_IS_PM10}],
|
||||
SensorDeviceClass.PM25: [{CONF_TYPE: CONF_IS_PM25}],
|
||||
SensorDeviceClass.PM4: [{CONF_TYPE: CONF_IS_PM4}],
|
||||
SensorDeviceClass.PRECIPITATION: [{CONF_TYPE: CONF_IS_PRECIPITATION}],
|
||||
SensorDeviceClass.PRECIPITATION_INTENSITY: [
|
||||
{CONF_TYPE: CONF_IS_PRECIPITATION_INTENSITY}
|
||||
@@ -195,6 +197,7 @@ CONDITION_SCHEMA = vol.All(
|
||||
CONF_IS_PM1,
|
||||
CONF_IS_PM10,
|
||||
CONF_IS_PM25,
|
||||
CONF_IS_PM4,
|
||||
CONF_IS_PRECIPITATION,
|
||||
CONF_IS_PRECIPITATION_INTENSITY,
|
||||
CONF_IS_PRESSURE,
|
||||
|
@@ -64,6 +64,7 @@ CONF_PH = "ph"
|
||||
CONF_PM1 = "pm1"
|
||||
CONF_PM10 = "pm10"
|
||||
CONF_PM25 = "pm25"
|
||||
CONF_PM4 = "pm4"
|
||||
CONF_POWER = "power"
|
||||
CONF_POWER_FACTOR = "power_factor"
|
||||
CONF_PRECIPITATION = "precipitation"
|
||||
@@ -123,6 +124,7 @@ ENTITY_TRIGGERS = {
|
||||
SensorDeviceClass.PM1: [{CONF_TYPE: CONF_PM1}],
|
||||
SensorDeviceClass.PM10: [{CONF_TYPE: CONF_PM10}],
|
||||
SensorDeviceClass.PM25: [{CONF_TYPE: CONF_PM25}],
|
||||
SensorDeviceClass.PM4: [{CONF_TYPE: CONF_PM4}],
|
||||
SensorDeviceClass.POWER: [{CONF_TYPE: CONF_POWER}],
|
||||
SensorDeviceClass.POWER_FACTOR: [{CONF_TYPE: CONF_POWER_FACTOR}],
|
||||
SensorDeviceClass.PRECIPITATION: [{CONF_TYPE: CONF_PRECIPITATION}],
|
||||
@@ -193,6 +195,7 @@ TRIGGER_SCHEMA = vol.All(
|
||||
CONF_PM1,
|
||||
CONF_PM10,
|
||||
CONF_PM25,
|
||||
CONF_PM4,
|
||||
CONF_POWER,
|
||||
CONF_POWER_FACTOR,
|
||||
CONF_PRECIPITATION,
|
||||
|
@@ -34,6 +34,7 @@
|
||||
"is_pm1": "Current {entity_name} PM1 concentration level",
|
||||
"is_pm10": "Current {entity_name} PM10 concentration level",
|
||||
"is_pm25": "Current {entity_name} PM2.5 concentration level",
|
||||
"is_pm4": "Current {entity_name} PM4 concentration level",
|
||||
"is_power": "Current {entity_name} power",
|
||||
"is_power_factor": "Current {entity_name} power factor",
|
||||
"is_precipitation": "Current {entity_name} precipitation",
|
||||
@@ -90,6 +91,7 @@
|
||||
"pm1": "{entity_name} PM1 concentration changes",
|
||||
"pm10": "{entity_name} PM10 concentration changes",
|
||||
"pm25": "{entity_name} PM2.5 concentration changes",
|
||||
"pm4": "{entity_name} PM4 concentration changes",
|
||||
"power": "{entity_name} power changes",
|
||||
"power_factor": "{entity_name} power factor changes",
|
||||
"precipitation": "{entity_name} precipitation changes",
|
||||
|
@@ -37,7 +37,6 @@ from .utils import (
|
||||
async_remove_orphaned_entities,
|
||||
get_blu_trv_device_info,
|
||||
get_device_entry_gen,
|
||||
get_virtual_component_ids,
|
||||
is_block_momentary_input,
|
||||
is_rpc_momentary_input,
|
||||
is_view_for_platform,
|
||||
@@ -307,6 +306,13 @@ RPC_SENSORS: Final = {
|
||||
device_class=BinarySensorDeviceClass.OCCUPANCY,
|
||||
entity_class=RpcPresenceBinarySensor,
|
||||
),
|
||||
"presencezone_state": RpcBinarySensorDescription(
|
||||
key="presencezone",
|
||||
sub_key="state",
|
||||
name="Occupancy",
|
||||
device_class=BinarySensorDeviceClass.OCCUPANCY,
|
||||
entity_class=RpcPresenceBinarySensor,
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@@ -333,18 +339,12 @@ async def async_setup_entry(
|
||||
hass, config_entry, async_add_entities, RPC_SENSORS, RpcBinarySensor
|
||||
)
|
||||
|
||||
# the user can remove virtual components from the device configuration, so
|
||||
# we need to remove orphaned entities
|
||||
virtual_binary_sensor_ids = get_virtual_component_ids(
|
||||
coordinator.device.config, BINARY_SENSOR_PLATFORM
|
||||
)
|
||||
async_remove_orphaned_entities(
|
||||
hass,
|
||||
config_entry.entry_id,
|
||||
coordinator.mac,
|
||||
BINARY_SENSOR_PLATFORM,
|
||||
virtual_binary_sensor_ids,
|
||||
"boolean",
|
||||
coordinator.device.status,
|
||||
)
|
||||
return
|
||||
|
||||
|
@@ -9,6 +9,7 @@ from typing import TYPE_CHECKING, Any, Final
|
||||
|
||||
from aioshelly.const import BLU_TRV_IDENTIFIER, MODEL_BLU_GATEWAY_G3, RPC_GENERATIONS
|
||||
from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError
|
||||
from aioshelly.rpc_device import RpcDevice
|
||||
|
||||
from homeassistant.components.button import (
|
||||
DOMAIN as BUTTON_PLATFORM,
|
||||
@@ -22,13 +23,13 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .const import DOMAIN, LOGGER, SHELLY_GAS_MODELS
|
||||
from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator
|
||||
from .entity import get_entity_block_device_info, get_entity_rpc_device_info
|
||||
from .utils import (
|
||||
async_remove_orphaned_entities,
|
||||
format_ble_addr,
|
||||
get_blu_trv_device_info,
|
||||
get_device_entry_gen,
|
||||
get_rpc_entity_name,
|
||||
@@ -112,12 +113,10 @@ def async_migrate_unique_ids(
|
||||
if not entity_entry.entity_id.startswith("button"):
|
||||
return None
|
||||
|
||||
device_name = slugify(coordinator.device.name)
|
||||
|
||||
for key in ("reboot", "self_test", "mute", "unmute"):
|
||||
old_unique_id = f"{device_name}_{key}"
|
||||
old_unique_id = f"{coordinator.mac}_{key}"
|
||||
if entity_entry.unique_id == old_unique_id:
|
||||
new_unique_id = f"{coordinator.mac}_{key}"
|
||||
new_unique_id = f"{coordinator.mac}-{key}"
|
||||
LOGGER.debug(
|
||||
"Migrating unique_id for %s entity from [%s] to [%s]",
|
||||
entity_entry.entity_id,
|
||||
@@ -130,6 +129,26 @@ def async_migrate_unique_ids(
|
||||
)
|
||||
}
|
||||
|
||||
if blutrv_key_ids := get_rpc_key_ids(coordinator.device.status, BLU_TRV_IDENTIFIER):
|
||||
assert isinstance(coordinator.device, RpcDevice)
|
||||
for _id in blutrv_key_ids:
|
||||
key = f"{BLU_TRV_IDENTIFIER}:{_id}"
|
||||
ble_addr: str = coordinator.device.config[key]["addr"]
|
||||
old_unique_id = f"{ble_addr}_calibrate"
|
||||
if entity_entry.unique_id == old_unique_id:
|
||||
new_unique_id = f"{format_ble_addr(ble_addr)}-{key}-calibrate"
|
||||
LOGGER.debug(
|
||||
"Migrating unique_id for %s entity from [%s] to [%s]",
|
||||
entity_entry.entity_id,
|
||||
old_unique_id,
|
||||
new_unique_id,
|
||||
)
|
||||
return {
|
||||
"new_unique_id": entity_entry.unique_id.replace(
|
||||
old_unique_id, new_unique_id
|
||||
)
|
||||
}
|
||||
|
||||
return None
|
||||
|
||||
|
||||
@@ -264,7 +283,7 @@ class ShellyButton(ShellyBaseButton):
|
||||
"""Initialize Shelly button."""
|
||||
super().__init__(coordinator, description)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.mac}_{description.key}"
|
||||
self._attr_unique_id = f"{coordinator.mac}-{description.key}"
|
||||
if isinstance(coordinator, ShellyBlockCoordinator):
|
||||
self._attr_device_info = get_entity_block_device_info(coordinator)
|
||||
else:
|
||||
@@ -297,7 +316,7 @@ class ShellyBluTrvButton(ShellyBaseButton):
|
||||
ble_addr: str = config["addr"]
|
||||
fw_ver = coordinator.device.status[key].get("fw_ver")
|
||||
|
||||
self._attr_unique_id = f"{ble_addr}_{description.key}"
|
||||
self._attr_unique_id = f"{format_ble_addr(ble_addr)}-{key}-{description.key}"
|
||||
self._attr_device_info = get_blu_trv_device_info(
|
||||
config, ble_addr, coordinator.mac, fw_ver
|
||||
)
|
||||
|
@@ -29,7 +29,6 @@ from aioshelly.const import (
|
||||
)
|
||||
|
||||
from homeassistant.components.number import NumberMode
|
||||
from homeassistant.components.sensor import SensorDeviceClass
|
||||
from homeassistant.const import UnitOfVolumeFlowRate
|
||||
|
||||
DOMAIN: Final = "shelly"
|
||||
@@ -269,7 +268,15 @@ DEVICES_WITHOUT_FIRMWARE_CHANGELOG = (
|
||||
|
||||
CONF_GEN = "gen"
|
||||
|
||||
VIRTUAL_COMPONENTS = ("boolean", "button", "enum", "input", "number", "text")
|
||||
VIRTUAL_COMPONENTS = (
|
||||
"boolean",
|
||||
"button",
|
||||
"enum",
|
||||
"input",
|
||||
"number",
|
||||
"presencezone",
|
||||
"text",
|
||||
)
|
||||
VIRTUAL_COMPONENTS_MAP = {
|
||||
"binary_sensor": {"types": ["boolean"], "modes": ["label"]},
|
||||
"button": {"types": ["button"], "modes": ["button"]},
|
||||
@@ -290,14 +297,6 @@ API_WS_URL = "/api/shelly/ws"
|
||||
|
||||
COMPONENT_ID_PATTERN = re.compile(r"[a-z\d]+:\d+")
|
||||
|
||||
ROLE_TO_DEVICE_CLASS_MAP = {
|
||||
"current_humidity": SensorDeviceClass.HUMIDITY,
|
||||
"current_temperature": SensorDeviceClass.TEMPERATURE,
|
||||
"flow_rate": SensorDeviceClass.VOLUME_FLOW_RATE,
|
||||
"water_pressure": SensorDeviceClass.PRESSURE,
|
||||
"water_temperature": SensorDeviceClass.TEMPERATURE,
|
||||
}
|
||||
|
||||
# Mapping for units that require conversion to a Home Assistant recognized unit
|
||||
# e.g. "m3/min" to "m³/min"
|
||||
DEVICE_UNIT_MAP = {
|
||||
|
@@ -186,6 +186,11 @@ def async_setup_rpc_attribute_entities(
|
||||
|
||||
for key in key_instances:
|
||||
# Filter non-existing sensors
|
||||
if description.role and description.role != coordinator.device.config[
|
||||
key
|
||||
].get("role", "generic"):
|
||||
continue
|
||||
|
||||
if description.sub_key not in coordinator.device.status[
|
||||
key
|
||||
] and not description.supported(coordinator.device.status[key]):
|
||||
@@ -310,6 +315,7 @@ class RpcEntityDescription(EntityDescription):
|
||||
unit: Callable[[dict], str | None] | None = None
|
||||
options_fn: Callable[[dict], list[str]] | None = None
|
||||
entity_class: Callable | None = None
|
||||
role: str | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
|
@@ -460,9 +460,12 @@ class RpcShellyCctLight(RpcShellyLightBase):
|
||||
) -> None:
|
||||
"""Initialize light."""
|
||||
super().__init__(coordinator, key, attribute, description)
|
||||
color_temp_range = coordinator.device.config[f"cct:{self._id}"]["ct_range"]
|
||||
self._attr_min_color_temp_kelvin = color_temp_range[0]
|
||||
self._attr_max_color_temp_kelvin = color_temp_range[1]
|
||||
if color_temp_range := coordinator.device.config[key].get("ct_range"):
|
||||
self._attr_min_color_temp_kelvin = color_temp_range[0]
|
||||
self._attr_max_color_temp_kelvin = color_temp_range[1]
|
||||
else:
|
||||
self._attr_min_color_temp_kelvin = KELVIN_MIN_VALUE_WHITE
|
||||
self._attr_max_color_temp_kelvin = KELVIN_MAX_VALUE
|
||||
|
||||
@property
|
||||
def color_temp_kelvin(self) -> int:
|
||||
|
@@ -2,9 +2,9 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Final, cast
|
||||
from functools import partial
|
||||
from typing import Any, Final, cast
|
||||
|
||||
from aioshelly.block_device import Block
|
||||
from aioshelly.const import RPC_GENERATIONS
|
||||
@@ -31,14 +31,18 @@ from homeassistant.const import (
|
||||
UnitOfEnergy,
|
||||
UnitOfFrequency,
|
||||
UnitOfPower,
|
||||
UnitOfPressure,
|
||||
UnitOfTemperature,
|
||||
UnitOfVolume,
|
||||
UnitOfVolumeFlowRate,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.entity_registry import RegistryEntry
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from .const import CONF_SLEEP_PERIOD, ROLE_TO_DEVICE_CLASS_MAP
|
||||
from .const import CONF_SLEEP_PERIOD, LOGGER
|
||||
from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator
|
||||
from .entity import (
|
||||
BlockEntityDescription,
|
||||
@@ -60,7 +64,6 @@ from .utils import (
|
||||
get_device_entry_gen,
|
||||
get_device_uptime,
|
||||
get_shelly_air_lamp_life,
|
||||
get_virtual_component_ids,
|
||||
get_virtual_component_unit,
|
||||
is_rpc_wifi_stations_disabled,
|
||||
is_view_for_platform,
|
||||
@@ -78,7 +81,6 @@ class BlockSensorDescription(BlockEntityDescription, SensorEntityDescription):
|
||||
class RpcSensorDescription(RpcEntityDescription, SensorEntityDescription):
|
||||
"""Class to describe a RPC sensor."""
|
||||
|
||||
device_class_fn: Callable[[dict], SensorDeviceClass | None] | None = None
|
||||
emeter_phase: str | None = None
|
||||
|
||||
|
||||
@@ -105,12 +107,6 @@ class RpcSensor(ShellyRpcAttributeEntity, SensorEntity):
|
||||
if self.option_map:
|
||||
self._attr_options = list(self.option_map.values())
|
||||
|
||||
if description.device_class_fn is not None:
|
||||
if device_class := description.device_class_fn(
|
||||
coordinator.device.config[key]
|
||||
):
|
||||
self._attr_device_class = device_class
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return value of sensor."""
|
||||
@@ -1383,25 +1379,24 @@ RPC_SENSORS: Final = {
|
||||
),
|
||||
unit=lambda config: config["xfreq"]["unit"] or None,
|
||||
),
|
||||
"text": RpcSensorDescription(
|
||||
"text_generic": RpcSensorDescription(
|
||||
key="text",
|
||||
sub_key="value",
|
||||
removal_condition=lambda config, _status, key: not is_view_for_platform(
|
||||
config, key, SENSOR_PLATFORM
|
||||
),
|
||||
role="generic",
|
||||
),
|
||||
"number": RpcSensorDescription(
|
||||
"number_generic": RpcSensorDescription(
|
||||
key="number",
|
||||
sub_key="value",
|
||||
removal_condition=lambda config, _status, key: not is_view_for_platform(
|
||||
config, key, SENSOR_PLATFORM
|
||||
),
|
||||
unit=get_virtual_component_unit,
|
||||
device_class_fn=lambda config: ROLE_TO_DEVICE_CLASS_MAP.get(config["role"])
|
||||
if "role" in config
|
||||
else None,
|
||||
role="generic",
|
||||
),
|
||||
"enum": RpcSensorDescription(
|
||||
"enum_generic": RpcSensorDescription(
|
||||
key="enum",
|
||||
sub_key="value",
|
||||
removal_condition=lambda config, _status, key: not is_view_for_platform(
|
||||
@@ -1409,6 +1404,7 @@ RPC_SENSORS: Final = {
|
||||
),
|
||||
options_fn=lambda config: config["options"],
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
role="generic",
|
||||
),
|
||||
"valve_position": RpcSensorDescription(
|
||||
key="blutrv",
|
||||
@@ -1450,6 +1446,49 @@ RPC_SENSORS: Final = {
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=["dark", "twilight", "bright"],
|
||||
),
|
||||
"number_current_humidity": RpcSensorDescription(
|
||||
key="number",
|
||||
sub_key="value",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
suggested_display_precision=1,
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
role="current_humidity",
|
||||
),
|
||||
"number_current_temperature": RpcSensorDescription(
|
||||
key="number",
|
||||
sub_key="value",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
suggested_display_precision=1,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
role="current_temperature",
|
||||
),
|
||||
"number_flow_rate": RpcSensorDescription(
|
||||
key="number",
|
||||
sub_key="value",
|
||||
native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_MINUTE,
|
||||
device_class=SensorDeviceClass.VOLUME_FLOW_RATE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
role="flow_rate",
|
||||
),
|
||||
"number_water_pressure": RpcSensorDescription(
|
||||
key="number",
|
||||
sub_key="value",
|
||||
native_unit_of_measurement=UnitOfPressure.KPA,
|
||||
device_class=SensorDeviceClass.PRESSURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
role="water_pressure",
|
||||
),
|
||||
"number_water_temperature": RpcSensorDescription(
|
||||
key="number",
|
||||
sub_key="value",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
suggested_display_precision=1,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
role="water_temperature",
|
||||
),
|
||||
"presence_num_objects": RpcSensorDescription(
|
||||
key="presence",
|
||||
sub_key="num_objects",
|
||||
@@ -1458,9 +1497,128 @@ RPC_SENSORS: Final = {
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_class=RpcPresenceSensor,
|
||||
),
|
||||
"presencezone_num_objects": RpcSensorDescription(
|
||||
key="presencezone",
|
||||
sub_key="num_objects",
|
||||
translation_key="detected_objects",
|
||||
name="Detected objects",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_class=RpcPresenceSensor,
|
||||
),
|
||||
"object_water_consumption": RpcSensorDescription(
|
||||
key="object",
|
||||
sub_key="value",
|
||||
value=lambda status, _: float(status["counter"]["total"]),
|
||||
native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
|
||||
suggested_display_precision=3,
|
||||
device_class=SensorDeviceClass.WATER,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
role="water_consumption",
|
||||
),
|
||||
"object_energy_consumption": RpcSensorDescription(
|
||||
key="object",
|
||||
sub_key="value",
|
||||
value=lambda status, _: float(status["counter"]["total"]),
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
suggested_display_precision=2,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
role="phase_info",
|
||||
),
|
||||
"object_total_act_energy": RpcSensorDescription(
|
||||
key="object",
|
||||
sub_key="value",
|
||||
name="Total Active Energy",
|
||||
value=lambda status, _: float(status["total_act_energy"]),
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
suggested_display_precision=2,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
role="phase_info",
|
||||
),
|
||||
"object_total_power": RpcSensorDescription(
|
||||
key="object",
|
||||
sub_key="value",
|
||||
name="Total Power",
|
||||
value=lambda status, _: float(status["total_power"]),
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
suggested_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
suggested_display_precision=2,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
role="phase_info",
|
||||
),
|
||||
"object_phase_a_voltage": RpcSensorDescription(
|
||||
key="object",
|
||||
sub_key="value",
|
||||
name="Phase A voltage",
|
||||
value=lambda status, _: float(status["phase_a"]["voltage"]),
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
suggested_display_precision=1,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
role="phase_info",
|
||||
),
|
||||
"object_phase_b_voltage": RpcSensorDescription(
|
||||
key="object",
|
||||
sub_key="value",
|
||||
name="Phase B voltage",
|
||||
value=lambda status, _: float(status["phase_b"]["voltage"]),
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
suggested_display_precision=1,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
role="phase_info",
|
||||
),
|
||||
"object_phase_c_voltage": RpcSensorDescription(
|
||||
key="object",
|
||||
sub_key="value",
|
||||
name="Phase C voltage",
|
||||
value=lambda status, _: float(status["phase_c"]["voltage"]),
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
suggested_display_precision=1,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
role="phase_info",
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@callback
|
||||
def async_migrate_unique_ids(
|
||||
coordinator: ShellyRpcCoordinator,
|
||||
entity_entry: er.RegistryEntry,
|
||||
) -> dict[str, Any] | None:
|
||||
"""Migrate sensor unique IDs to include role."""
|
||||
if not entity_entry.entity_id.startswith("sensor."):
|
||||
return None
|
||||
|
||||
for sensor_id in ("text", "number", "enum"):
|
||||
old_unique_id = entity_entry.unique_id
|
||||
if old_unique_id.endswith(f"-{sensor_id}"):
|
||||
if entity_entry.original_device_class == SensorDeviceClass.HUMIDITY:
|
||||
new_unique_id = f"{old_unique_id}_current_humidity"
|
||||
elif entity_entry.original_device_class == SensorDeviceClass.TEMPERATURE:
|
||||
new_unique_id = f"{old_unique_id}_current_temperature"
|
||||
else:
|
||||
new_unique_id = f"{old_unique_id}_generic"
|
||||
LOGGER.debug(
|
||||
"Migrating unique_id for %s entity from [%s] to [%s]",
|
||||
entity_entry.entity_id,
|
||||
old_unique_id,
|
||||
new_unique_id,
|
||||
)
|
||||
return {
|
||||
"new_unique_id": entity_entry.unique_id.replace(
|
||||
old_unique_id, new_unique_id
|
||||
)
|
||||
}
|
||||
|
||||
return None
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ShellyConfigEntry,
|
||||
@@ -1480,6 +1638,12 @@ async def async_setup_entry(
|
||||
coordinator = config_entry.runtime_data.rpc
|
||||
assert coordinator
|
||||
|
||||
await er.async_migrate_entries(
|
||||
hass,
|
||||
config_entry.entry_id,
|
||||
partial(async_migrate_unique_ids, coordinator),
|
||||
)
|
||||
|
||||
async_setup_entry_rpc(
|
||||
hass, config_entry, async_add_entities, RPC_SENSORS, RpcSensor
|
||||
)
|
||||
@@ -1491,21 +1655,6 @@ async def async_setup_entry(
|
||||
SENSOR_PLATFORM,
|
||||
coordinator.device.status,
|
||||
)
|
||||
|
||||
# the user can remove virtual components from the device configuration, so
|
||||
# we need to remove orphaned entities
|
||||
virtual_component_ids = get_virtual_component_ids(
|
||||
coordinator.device.config, SENSOR_PLATFORM
|
||||
)
|
||||
for component in ("enum", "number", "text"):
|
||||
async_remove_orphaned_entities(
|
||||
hass,
|
||||
config_entry.entry_id,
|
||||
coordinator.mac,
|
||||
SENSOR_PLATFORM,
|
||||
virtual_component_ids,
|
||||
component,
|
||||
)
|
||||
return
|
||||
|
||||
if config_entry.data[CONF_SLEEP_PERIOD]:
|
||||
|
@@ -648,7 +648,10 @@ def get_virtual_component_ids(config: dict[str, Any], platform: str) -> list[str
|
||||
ids.extend(
|
||||
k
|
||||
for k, v in config.items()
|
||||
if k.startswith(comp_type) and v["meta"]["ui"]["view"] in component["modes"]
|
||||
if k.startswith(comp_type)
|
||||
# default to button view if not set, workaround for Wall Display
|
||||
and v.get("meta", {"ui": {"view": "button"}})["ui"]["view"]
|
||||
in component["modes"]
|
||||
)
|
||||
|
||||
return ids
|
||||
@@ -915,3 +918,8 @@ def remove_empty_sub_devices(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
dev_reg.async_update_device(
|
||||
device.id, remove_config_entry_id=entry.entry_id
|
||||
)
|
||||
|
||||
|
||||
def format_ble_addr(ble_addr: str) -> str:
|
||||
"""Format BLE address to use in unique_id."""
|
||||
return ble_addr.replace(":", "").upper()
|
||||
|
@@ -290,7 +290,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up SimpliSafe as config entry."""
|
||||
_async_standardize_config_entry(hass, entry)
|
||||
|
||||
_verify_domain_control = verify_domain_control(hass, DOMAIN)
|
||||
_verify_domain_control = verify_domain_control(DOMAIN)
|
||||
websession = aiohttp_client.async_get_clientsession(hass)
|
||||
|
||||
try:
|
||||
|
@@ -378,7 +378,7 @@ async def root_payload(
|
||||
children.extend(item.children)
|
||||
else:
|
||||
children.append(item)
|
||||
except media_source.BrowseError:
|
||||
except BrowseError:
|
||||
pass
|
||||
|
||||
if len(children) == 1:
|
||||
|
Some files were not shown because too many files have changed in this diff.